diff --git a/api/Pipfile b/api/Pipfile index 1eef32a7d..ffd5906f5 100644 --- a/api/Pipfile +++ b/api/Pipfile @@ -4,21 +4,24 @@ verify_ssl = true name = "pypi" [packages] -urllib3 = "==1.26.16" +sqlparse = "==0.5.2" +urllib3 = "==2.2.3" requests = "==2.32.3" -boto3 = "==1.35.60" -pyjwt = "==2.9.0" +boto3 = "==1.35.76" +pyjwt = "==2.10.1" psycopg2-binary = "==2.9.10" -psycopg = {extras = ["pool", "binary"], version = "==3.2.3"} +psycopg = {extras = ["binary", "pool"], version = "==3.2.3"} +clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"} +clickhouse-connect = "==0.8.9" elasticsearch = "==8.16.0" jira = "==3.8.0" cachetools = "==5.5.0" -fastapi = "==0.115.5" -uvicorn = {extras = ["standard"], version = "==0.32.0"} +fastapi = "==0.115.6" +uvicorn = {extras = ["standard"], version = "==0.32.1"} python-decouple = "==3.8" -pydantic = {extras = ["email"], version = "==2.9.2"} -apscheduler = "==3.10.4" -redis = "==5.2.0" +pydantic = {extras = ["email"], version = "==2.10.3"} +apscheduler = "==3.11.0" +redis = "==5.2.1" [dev-packages] diff --git a/api/app.py b/api/app.py index 41775206e..d7e5215a5 100644 --- a/api/app.py +++ b/api/app.py @@ -13,17 +13,16 @@ from psycopg.rows import dict_row from starlette.responses import StreamingResponse from chalicelib.utils import helper -from chalicelib.utils import pg_client +from chalicelib.utils import pg_client, ch_client from crons import core_crons, core_dynamic_crons from routers import core, core_dynamic -from routers.subs import insights, metrics, v1_api, health, usability_tests, spot +from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics loglevel = config("LOGLEVEL", default=logging.WARNING) print(f">Loglevel set to: {loglevel}") logging.basicConfig(level=loglevel) - class ORPYAsyncConnection(AsyncConnection): def __init__(self, *args, **kwargs): @@ -39,6 +38,7 @@ async def lifespan(app: FastAPI): app.schedule = AsyncIOScheduler() await pg_client.init() + await ch_client.init() app.schedule.start() for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: @@ -128,3 +128,7 @@ app.include_router(usability_tests.app_apikey) app.include_router(spot.public_app) app.include_router(spot.app) app.include_router(spot.app_apikey) + +app.include_router(product_anaytics.public_app) +app.include_router(product_anaytics.app) +app.include_router(product_anaytics.app_apikey) diff --git a/api/app_alerts.py b/api/app_alerts.py index 9587048dd..863fb3967 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -5,7 +5,7 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config from fastapi import FastAPI -from chalicelib.core import alerts_processor +from chalicelib.core.alerts import alerts_processor from chalicelib.utils import pg_client diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index fd4d145b1..2e30e6975 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -45,8 +45,6 @@ class JWTAuth(HTTPBearer): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials) - logger.info("------ jwt_payload ------") - logger.info(jwt_payload) auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1), jwt_iat=jwt_payload.get("iat", 100)) if jwt_payload is None \ @@ -120,8 +118,7 @@ class JWTAuth(HTTPBearer): jwt_payload = None else: jwt_payload = 
authorizers.jwt_refresh_authorizer(scheme="Bearer", token=request.cookies["spotRefreshToken"]) - logger.info("__process_spot_refresh_call") - logger.info(jwt_payload) + if jwt_payload is None or jwt_payload.get("jti") is None: logger.warning("Null spotRefreshToken's payload, or null JTI.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, diff --git a/api/chalicelib/core/alerts/__init__.py b/api/chalicelib/core/alerts/__init__.py new file mode 100644 index 000000000..fad7c108a --- /dev/null +++ b/api/chalicelib/core/alerts/__init__.py @@ -0,0 +1,10 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) +if config("EXP_ALERTS", cast=bool, default=False): + logging.info(">>> Using experimental alerts") + from . import alerts_processor_ch as alerts_processor +else: + from . import alerts_processor as alerts_processor diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts/alerts.py similarity index 98% rename from api/chalicelib/core/alerts.py rename to api/chalicelib/core/alerts/alerts.py index 92df6e551..8fc0c8877 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts/alerts.py @@ -7,8 +7,8 @@ from decouple import config import schemas from chalicelib.core import notifications, webhook -from chalicelib.core.collaboration_msteams import MSTeams -from chalicelib.core.collaboration_slack import Slack +from chalicelib.core.collaborations.collaboration_msteams import MSTeams +from chalicelib.core.collaborations.collaboration_slack import Slack from chalicelib.utils import pg_client, helper, email_helper, smtp from chalicelib.utils.TimeUTC import TimeUTC diff --git a/api/chalicelib/core/alerts_listener.py b/api/chalicelib/core/alerts/alerts_listener.py similarity index 93% rename from api/chalicelib/core/alerts_listener.py rename to api/chalicelib/core/alerts/alerts_listener.py index e8ab9d4eb..669982f7c 100644 --- a/api/chalicelib/core/alerts_listener.py +++ b/api/chalicelib/core/alerts/alerts_listener.py @@ -1,9 +1,10 @@ +from chalicelib.core.alerts.modules import TENANT_ID from chalicelib.utils import pg_client, helper def get_all_alerts(): with pg_client.PostgresClient(long_query=True) as cur: - query = """SELECT -1 AS tenant_id, + query = f"""SELECT {TENANT_ID} AS tenant_id, alert_id, projects.project_id, projects.name AS project_name, diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts/alerts_processor.py similarity index 75% rename from api/chalicelib/core/alerts_processor.py rename to api/chalicelib/core/alerts/alerts_processor.py index 1735a64ca..f07b4984e 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts/alerts_processor.py @@ -1,16 +1,15 @@ -import decimal import logging from pydantic_core._pydantic_core import ValidationError import schemas -from chalicelib.core import alerts -from chalicelib.core import alerts_listener -from chalicelib.core import sessions +from chalicelib.core.alerts import alerts, alerts_listener +from chalicelib.core.alerts.modules import sessions, alert_helpers from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC logger = logging.getLogger(__name__) + LeftToDb = { schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", @@ -46,35 +45,6 @@ LeftToDb = { "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False}, } -# This is the frequency of execution for each threshold 
-TimeInterval = { - 15: 3, - 30: 5, - 60: 10, - 120: 20, - 240: 30, - 1440: 60, -} - - -def can_check(a) -> bool: - now = TimeUTC.now() - - repetitionBase = a["options"]["currentPeriod"] \ - if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \ - and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \ - else a["options"]["previousPeriod"] - - if TimeInterval.get(repetitionBase) is None: - logger.error(f"repetitionBase: {repetitionBase} NOT FOUND") - return False - - return (a["options"]["renotifyInterval"] <= 0 or - a["options"].get("lastNotification") is None or - a["options"]["lastNotification"] <= 0 or - ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \ - and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000 - def Build(a): now = TimeUTC.now() @@ -165,7 +135,7 @@ def process(): all_alerts = alerts_listener.get_all_alerts() with pg_client.PostgresClient() as cur: for alert in all_alerts: - if can_check(alert): + if alert_helpers.can_check(alert): query, params = Build(alert) try: query = cur.mogrify(query, params) @@ -181,7 +151,7 @@ def process(): result = cur.fetchone() if result["valid"]: logger.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}") - notifications.append(generate_notification(alert, result)) + notifications.append(alert_helpers.generate_notification(alert, result)) except Exception as e: logger.error( f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") @@ -195,42 +165,3 @@ def process(): WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])})) if len(notifications) > 0: alerts.process_notifications(notifications) - - -def __format_value(x): - if x % 1 == 0: - x = int(x) - else: - x = round(x, 2) - return f"{x:,}" - - -def generate_notification(alert, result): - left = __format_value(result['value']) - right = __format_value(alert['query']['right']) - return { - "alertId": alert["alertId"], - "tenantId": alert["tenantId"], - "title": alert["name"], - "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).", - "buttonText": "Check metrics for more details", - "buttonUrl": f"/{alert['projectId']}/metrics", - "imageUrl": None, - "projectId": alert["projectId"], - "projectName": alert["projectName"], - "options": {"source": "ALERT", "sourceId": alert["alertId"], - "sourceMeta": alert["detectionMethod"], - "message": alert["options"]["message"], "projectId": alert["projectId"], - "data": {"title": alert["name"], - "limitValue": alert["query"]["right"], - "actualValue": float(result["value"]) \ - if isinstance(result["value"], decimal.Decimal) \ - else result["value"], - "operator": alert["query"]["operator"], - "trigger": alert["query"]["left"], - "alertId": alert["alertId"], - "detectionMethod": alert["detectionMethod"], - "currentPeriod": alert["options"]["currentPeriod"], - "previousPeriod": alert["options"]["previousPeriod"], - "createdAt": TimeUTC.now()}}, - } diff --git a/ee/api/chalicelib/core/alerts_processor_exp.py b/api/chalicelib/core/alerts/alerts_processor_ch.py similarity index 96% rename from ee/api/chalicelib/core/alerts_processor_exp.py rename to api/chalicelib/core/alerts/alerts_processor_ch.py index 13e047206..3b8266701 100644 --- a/ee/api/chalicelib/core/alerts_processor_exp.py +++ b/api/chalicelib/core/alerts/alerts_processor_ch.py @@ -3,9 +3,8 @@ import logging from pydantic_core._pydantic_core import ValidationError 
import schemas -from chalicelib.core import alerts -from chalicelib.core import alerts_listener, alerts_processor -from chalicelib.core import sessions_exp as sessions +from chalicelib.core.alerts import alerts, alerts_listener +from chalicelib.core.alerts.modules import sessions, alert_helpers from chalicelib.utils import pg_client, ch_client, exp_ch_helper from chalicelib.utils.TimeUTC import TimeUTC @@ -162,7 +161,7 @@ def process(): for alert in all_alerts: if alert["query"]["left"] != "CUSTOM": continue - if alerts_processor.can_check(alert): + if alert_helpers.can_check(alert): query, params = Build(alert) try: query = ch_cur.format(query, params) @@ -180,7 +179,7 @@ def process(): if result["valid"]: logger.info("Valid alert, notifying users") - notifications.append(alerts_processor.generate_notification(alert, result)) + notifications.append(alert_helpers.generate_notification(alert, result)) except Exception as e: logger.error(f"!!!Error while running alert query for alertId:{alert['alertId']}") logger.error(str(e)) diff --git a/api/chalicelib/core/alerts/modules/__init__.py b/api/chalicelib/core/alerts/modules/__init__.py new file mode 100644 index 000000000..52fc3f0c0 --- /dev/null +++ b/api/chalicelib/core/alerts/modules/__init__.py @@ -0,0 +1,9 @@ +from decouple import config + +TENANT_ID = "-1" +if config("EXP_ALERTS", cast=bool, default=False): + from chalicelib.core.sessions import sessions_ch as sessions +else: + from chalicelib.core.sessions import sessions + +from . import helpers as alert_helpers diff --git a/api/chalicelib/core/alerts/modules/helpers.py b/api/chalicelib/core/alerts/modules/helpers.py new file mode 100644 index 000000000..118733311 --- /dev/null +++ b/api/chalicelib/core/alerts/modules/helpers.py @@ -0,0 +1,74 @@ +import decimal +import logging + +import schemas +from chalicelib.utils.TimeUTC import TimeUTC + +logger = logging.getLogger(__name__) +# This is the frequency of execution for each threshold +TimeInterval = { + 15: 3, + 30: 5, + 60: 10, + 120: 20, + 240: 30, + 1440: 60, +} + + +def __format_value(x): + if x % 1 == 0: + x = int(x) + else: + x = round(x, 2) + return f"{x:,}" + + +def can_check(a) -> bool: + now = TimeUTC.now() + + repetitionBase = a["options"]["currentPeriod"] \ + if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \ + and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \ + else a["options"]["previousPeriod"] + + if TimeInterval.get(repetitionBase) is None: + logger.error(f"repetitionBase: {repetitionBase} NOT FOUND") + return False + + return (a["options"]["renotifyInterval"] <= 0 or + a["options"].get("lastNotification") is None or + a["options"]["lastNotification"] <= 0 or + ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \ + and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000 + + +def generate_notification(alert, result): + left = __format_value(result['value']) + right = __format_value(alert['query']['right']) + return { + "alertId": alert["alertId"], + "tenantId": alert["tenantId"], + "title": alert["name"], + "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).", + "buttonText": "Check metrics for more details", + "buttonUrl": f"/{alert['projectId']}/metrics", + "imageUrl": None, + "projectId": alert["projectId"], + "projectName": alert["projectName"], + "options": {"source": "ALERT", "sourceId": alert["alertId"], + "sourceMeta": alert["detectionMethod"], + "message": 
alert["options"]["message"], "projectId": alert["projectId"], + "data": {"title": alert["name"], + "limitValue": alert["query"]["right"], + "actualValue": float(result["value"]) \ + if isinstance(result["value"], decimal.Decimal) \ + else result["value"], + "operator": alert["query"]["operator"], + "trigger": alert["query"]["left"], + "alertId": alert["alertId"], + "detectionMethod": alert["detectionMethod"], + "currentPeriod": alert["options"]["currentPeriod"], + "previousPeriod": alert["options"]["previousPeriod"], + "createdAt": TimeUTC.now()}}, + } diff --git a/api/chalicelib/core/autocomplete/__init__.py b/api/chalicelib/core/autocomplete/__init__.py new file mode 100644 index 000000000..0186c2f29 --- /dev/null +++ b/api/chalicelib/core/autocomplete/__init__.py @@ -0,0 +1,11 @@ +import logging + +from decouple import config + +logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) + +if config("EXP_AUTOCOMPLETE", cast=bool, default=False): + logging.info(">>> Using experimental autocomplete") + from . import autocomplete_ch as autocomplete +else: + from . import autocomplete diff --git a/api/chalicelib/core/autocomplete.py b/api/chalicelib/core/autocomplete/autocomplete.py similarity index 98% rename from api/chalicelib/core/autocomplete.py rename to api/chalicelib/core/autocomplete/autocomplete.py index 9e4a18b0c..f80059f37 100644 --- a/api/chalicelib/core/autocomplete.py +++ b/api/chalicelib/core/autocomplete/autocomplete.py @@ -61,11 +61,11 @@ def __get_autocomplete_table(value, project_id): try: cur.execute(query) except Exception as err: - print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") - print(query.decode('UTF-8')) - print("--------- VALUE -----------") - print(value) - print("--------------------") + logger.exception("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") + logger.exception(query.decode('UTF-8')) + logger.exception("--------- VALUE -----------") + logger.exception(value) + logger.exception("--------------------") raise err results = cur.fetchall() for r in results: diff --git a/ee/api/chalicelib/core/autocomplete_exp.py b/api/chalicelib/core/autocomplete/autocomplete_ch.py similarity index 97% rename from ee/api/chalicelib/core/autocomplete_exp.py rename to api/chalicelib/core/autocomplete/autocomplete_ch.py index b1306ad52..b2cda416a 100644 --- a/ee/api/chalicelib/core/autocomplete_exp.py +++ b/api/chalicelib/core/autocomplete/autocomplete_ch.py @@ -1,3 +1,4 @@ +import logging import schemas from chalicelib.core import countries, events, metadata from chalicelib.utils import ch_client @@ -5,6 +6,7 @@ from chalicelib.utils import helper, exp_ch_helper from chalicelib.utils.event_filter_definition import Event from chalicelib.utils.or_cache import CachedResponse +logger = logging.getLogger(__name__) TABLE = "experimental.autocomplete" @@ -59,13 +61,13 @@ def __get_autocomplete_table(value, project_id): try: results = cur.execute(query=query, params=params) except Exception as err: - print("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") - print(cur.format(query=query, params=params)) - print("--------- PARAMS -----------") - print(params) - print("--------- VALUE -----------") - print(value) - print("--------------------") + logger.exception("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------") + logger.exception(cur.format(query=query, params=params)) + logger.exception("--------- PARAMS -----------") + logger.exception(params) + logger.exception("--------- VALUE -----------") + 
logger.exception(value) + logger.exception("--------------------") raise err for r in results: r["type"] = r.pop("_type") diff --git a/api/chalicelib/core/boarding.py b/api/chalicelib/core/boarding.py index 68843b2f8..99a93a645 100644 --- a/api/chalicelib/core/boarding.py +++ b/api/chalicelib/core/boarding.py @@ -1,5 +1,6 @@ from chalicelib.utils import pg_client -from chalicelib.core import projects, log_tool_datadog, log_tool_stackdriver, log_tool_sentry +from chalicelib.core import projects +from chalicelib.core.log_tools import datadog, stackdriver, sentry from chalicelib.core import users @@ -49,9 +50,9 @@ def get_state(tenant_id): "done": len(users.get_members(tenant_id=tenant_id)) > 1, "URL": "https://app.openreplay.com/client/manage-users"}, {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, + "done": len(datadog.get_all(tenant_id=tenant_id)) > 0 \ + or len(sentry.get_all(tenant_id=tenant_id)) > 0 \ + or len(stackdriver.get_all(tenant_id=tenant_id)) > 0, "URL": "https://docs.openreplay.com/integrations"} ] @@ -108,7 +109,7 @@ def get_state_manage_users(tenant_id): def get_state_integrations(tenant_id): return {"task": "Integrations", - "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0 \ - or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0, + "done": len(datadog.get_all(tenant_id=tenant_id)) > 0 \ + or len(sentry.get_all(tenant_id=tenant_id)) > 0 \ + or len(stackdriver.get_all(tenant_id=tenant_id)) > 0, "URL": "https://docs.openreplay.com/integrations"} diff --git a/api/chalicelib/core/collaborations/__init__.py b/api/chalicelib/core/collaborations/__init__.py new file mode 100644 index 000000000..96b67383d --- /dev/null +++ b/api/chalicelib/core/collaborations/__init__.py @@ -0,0 +1 @@ +from . 
import collaboration_base as _ diff --git a/api/chalicelib/core/collaboration_base.py b/api/chalicelib/core/collaborations/collaboration_base.py similarity index 100% rename from api/chalicelib/core/collaboration_base.py rename to api/chalicelib/core/collaborations/collaboration_base.py diff --git a/api/chalicelib/core/collaboration_msteams.py b/api/chalicelib/core/collaborations/collaboration_msteams.py similarity index 98% rename from api/chalicelib/core/collaboration_msteams.py rename to api/chalicelib/core/collaborations/collaboration_msteams.py index b94cc42f9..405c4cf47 100644 --- a/api/chalicelib/core/collaboration_msteams.py +++ b/api/chalicelib/core/collaborations/collaboration_msteams.py @@ -6,7 +6,7 @@ from fastapi import HTTPException, status import schemas from chalicelib.core import webhook -from chalicelib.core.collaboration_base import BaseCollaboration +from chalicelib.core.collaborations.collaboration_base import BaseCollaboration logger = logging.getLogger(__name__) diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaborations/collaboration_slack.py similarity index 98% rename from api/chalicelib/core/collaboration_slack.py rename to api/chalicelib/core/collaborations/collaboration_slack.py index 42cb88323..753bca3e4 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaborations/collaboration_slack.py @@ -6,7 +6,7 @@ from fastapi import HTTPException, status import schemas from chalicelib.core import webhook -from chalicelib.core.collaboration_base import BaseCollaboration +from chalicelib.core.collaborations.collaboration_base import BaseCollaboration class Slack(BaseCollaboration): diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 6a9e0ab10..a9d51dfc9 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -4,28 +4,14 @@ import logging from fastapi import HTTPException, status import schemas -from chalicelib.core import sessions, funnels, errors, issues, heatmaps, product_analytics, \ - custom_metrics_predefined +from chalicelib.core import funnels, errors, issues, heatmaps, product_analytics, custom_metrics_predefined +from chalicelib.core.sessions import sessions from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC logger = logging.getLogger(__name__) -# TODO: refactor this to split -# timeseries / -# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs -# remove "table of" calls from this function -def __try_live(project_id, data: schemas.CardSchema): - results = [] - for i, s in enumerate(data.series): - results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, - view_type=data.view_type, metric_type=data.metric_type, - metric_of=data.metric_of, metric_value=data.metric_value)) - - return results - - def __get_table_of_series(project_id, data: schemas.CardSchema): results = [] for i, s in enumerate(data.series): @@ -43,9 +29,6 @@ def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel "totalDropDueToIssues": 0 } - # return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, - # data=data.series[0].filter, - # metric_format=data.metric_format) return funnels.get_simple_funnel(project=project, data=data.series[0].filter, metric_format=data.metric_format) @@ -93,7 +76,12 @@ def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: 
int, dat def __get_timeseries_chart(project: schemas.ProjectContext, data: schemas.CardTimeSeries, user_id: int = None): - series_charts = __try_live(project_id=project.project_id, data=data) + series_charts = [] + for i, s in enumerate(data.series): + series_charts.append(sessions.search2_series(data=s.filter, project_id=project.project_id, density=data.density, + view_type=data.view_type, metric_type=data.metric_type, + metric_of=data.metric_of, metric_value=data.metric_value)) + results = [{}] * len(series_charts[0]) for i in range(len(results)): for j, series_chart in enumerate(series_charts): @@ -173,19 +161,12 @@ def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id schemas.MetricType.TABLE: __get_table_chart, schemas.MetricType.HEAT_MAP: __get_heat_map_chart, schemas.MetricType.FUNNEL: __get_funnel_chart, - schemas.MetricType.INSIGHTS: not_supported, schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart } return supported.get(data.metric_type, not_supported)(project=project, data=data, user_id=user_id) def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): return None results = [] @@ -220,7 +201,6 @@ def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.Card schemas.MetricType.TIMESERIES: not_supported, schemas.MetricType.TABLE: not_supported, schemas.MetricType.HEAT_MAP: not_supported, - schemas.MetricType.INSIGHTS: not_supported, schemas.MetricType.PATH_ANALYSIS: not_supported, } return supported.get(data.metric_type, not_supported)() @@ -555,17 +535,7 @@ def change_state(project_id, metric_id, user_id, status): def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, - data: schemas.CardSessionsSchema - # , range_value=None, start_date=None, end_date=None - ): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) - # if metric is None: - # return None + data: schemas.CardSessionsSchema): if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): return None for s in data.series: diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 07bf02817..afb955355 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -1,9 +1,9 @@ from typing import Optional import schemas -from chalicelib.core import autocomplete +from chalicelib.core.autocomplete import autocomplete from chalicelib.core import issues -from chalicelib.core import sessions_metas +from chalicelib.core.sessions import sessions_metas from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.event_filter_definition import SupportedFilter, Event diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py index da38e453c..092b908ab 100644 --- 
a/api/chalicelib/core/heatmaps.py +++ b/api/chalicelib/core/heatmaps.py @@ -1,7 +1,8 @@ import logging import schemas -from chalicelib.core import sessions_mobs, sessions +from chalicelib.core import sessions +from chalicelib.core.sessions import sessions_mobs from chalicelib.utils import pg_client, helper from chalicelib.utils import sql_helper as sh diff --git a/api/chalicelib/core/issue_tracking/__init__.py b/api/chalicelib/core/issue_tracking/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/api/chalicelib/core/integration_base.py b/api/chalicelib/core/issue_tracking/base.py similarity index 100% rename from api/chalicelib/core/integration_base.py rename to api/chalicelib/core/issue_tracking/base.py diff --git a/api/chalicelib/core/integration_base_issue.py b/api/chalicelib/core/issue_tracking/base_issue.py similarity index 100% rename from api/chalicelib/core/integration_base_issue.py rename to api/chalicelib/core/issue_tracking/base_issue.py diff --git a/api/chalicelib/core/integration_github.py b/api/chalicelib/core/issue_tracking/github.py similarity index 95% rename from api/chalicelib/core/integration_github.py rename to api/chalicelib/core/issue_tracking/github.py index f82eb9067..c67559a26 100644 --- a/api/chalicelib/core/integration_github.py +++ b/api/chalicelib/core/issue_tracking/github.py @@ -1,6 +1,6 @@ import schemas -from chalicelib.core import integration_base -from chalicelib.core.integration_github_issue import GithubIntegrationIssue +from chalicelib.core.issue_tracking import integration_base +from chalicelib.core.issue_tracking.integration_github_issue import GithubIntegrationIssue from chalicelib.utils import pg_client, helper PROVIDER = schemas.IntegrationType.GITHUB diff --git a/api/chalicelib/core/integration_github_issue.py b/api/chalicelib/core/issue_tracking/github_issue.py similarity index 97% rename from api/chalicelib/core/integration_github_issue.py rename to api/chalicelib/core/issue_tracking/github_issue.py index 0c2b78720..594d5fa80 100644 --- a/api/chalicelib/core/integration_github_issue.py +++ b/api/chalicelib/core/issue_tracking/github_issue.py @@ -1,4 +1,4 @@ -from chalicelib.core.integration_base_issue import BaseIntegrationIssue +from chalicelib.core.issue_tracking.integration_base_issue import BaseIntegrationIssue from chalicelib.utils import github_client_v3 from chalicelib.utils.github_client_v3 import github_formatters as formatter diff --git a/api/chalicelib/core/integrations_global.py b/api/chalicelib/core/issue_tracking/integrations_global.py similarity index 94% rename from api/chalicelib/core/integrations_global.py rename to api/chalicelib/core/issue_tracking/integrations_global.py index 6959c9ff6..7fcb19e74 100644 --- a/api/chalicelib/core/integrations_global.py +++ b/api/chalicelib/core/issue_tracking/integrations_global.py @@ -1,4 +1,5 @@ import schemas +from chalicelib.core.issue_tracking.modules import TENANT_CONDITION from chalicelib.utils import pg_client @@ -51,10 +52,10 @@ def get_global_integrations_status(tenant_id, user_id, project_id): AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value}, + WHERE type='slack' AND deleted_at ISNULL AND {TENANT_CONDITION})) AS {schemas.IntegrationType.SLACK.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value}, + WHERE type='msteams' AND 
deleted_at ISNULL AND {TENANT_CONDITION})) AS {schemas.IntegrationType.MS_TEAMS.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s AND provider='dynatrace')) AS {schemas.IntegrationType.DYNATRACE.value};""", diff --git a/api/chalicelib/core/integrations_manager.py b/api/chalicelib/core/issue_tracking/integrations_manager.py similarity index 96% rename from api/chalicelib/core/integrations_manager.py rename to api/chalicelib/core/issue_tracking/integrations_manager.py index 5cc15cfba..82fbab152 100644 --- a/api/chalicelib/core/integrations_manager.py +++ b/api/chalicelib/core/issue_tracking/integrations_manager.py @@ -1,4 +1,4 @@ -from chalicelib.core import integration_github, integration_jira_cloud +from chalicelib.core.issue_tracking import integration_github, integration_jira_cloud from chalicelib.utils import pg_client SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER] diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/issue_tracking/jira_cloud.py similarity index 97% rename from api/chalicelib/core/integration_jira_cloud.py rename to api/chalicelib/core/issue_tracking/jira_cloud.py index 6df96efaf..85069dfac 100644 --- a/api/chalicelib/core/integration_jira_cloud.py +++ b/api/chalicelib/core/issue_tracking/jira_cloud.py @@ -1,6 +1,6 @@ import schemas -from chalicelib.core import integration_base -from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue +from chalicelib.core.issue_tracking import integration_base +from chalicelib.core.issue_tracking.integration_jira_cloud_issue import JIRACloudIntegrationIssue from chalicelib.utils import pg_client, helper PROVIDER = schemas.IntegrationType.JIRA diff --git a/api/chalicelib/core/integration_jira_cloud_issue.py b/api/chalicelib/core/issue_tracking/jira_cloud_issue.py similarity index 96% rename from api/chalicelib/core/integration_jira_cloud_issue.py rename to api/chalicelib/core/issue_tracking/jira_cloud_issue.py index bb847007a..e089c7482 100644 --- a/api/chalicelib/core/integration_jira_cloud_issue.py +++ b/api/chalicelib/core/issue_tracking/jira_cloud_issue.py @@ -1,5 +1,5 @@ from chalicelib.utils import jira_client -from chalicelib.core.integration_base_issue import BaseIntegrationIssue +from chalicelib.core.issue_tracking.integration_base_issue import BaseIntegrationIssue class JIRACloudIntegrationIssue(BaseIntegrationIssue): diff --git a/api/chalicelib/core/issue_tracking/modules/__init__.py b/api/chalicelib/core/issue_tracking/modules/__init__.py new file mode 100644 index 000000000..a9100a3ac --- /dev/null +++ b/api/chalicelib/core/issue_tracking/modules/__init__.py @@ -0,0 +1 @@ +TENANT_CONDITION = "TRUE" diff --git a/api/chalicelib/core/jobs.py b/api/chalicelib/core/jobs.py index 36c547455..ab9de83a2 100644 --- a/api/chalicelib/core/jobs.py +++ b/api/chalicelib/core/jobs.py @@ -1,6 +1,6 @@ from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.core import sessions_mobs, sessions_devtool +from chalicelib.core.sessions import sessions_mobs, sessions_devtool class Actions: diff --git a/api/chalicelib/core/log_tools/__init__.py b/api/chalicelib/core/log_tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/api/chalicelib/core/log_tool_bugsnag.py b/api/chalicelib/core/log_tools/bugsnag.py similarity index 100% rename from api/chalicelib/core/log_tool_bugsnag.py rename to api/chalicelib/core/log_tools/bugsnag.py diff --git 
a/api/chalicelib/core/log_tool_cloudwatch.py b/api/chalicelib/core/log_tools/cloudwatch.py similarity index 100% rename from api/chalicelib/core/log_tool_cloudwatch.py rename to api/chalicelib/core/log_tools/cloudwatch.py diff --git a/api/chalicelib/core/log_tool_datadog.py b/api/chalicelib/core/log_tools/datadog.py similarity index 100% rename from api/chalicelib/core/log_tool_datadog.py rename to api/chalicelib/core/log_tools/datadog.py diff --git a/api/chalicelib/core/log_tool_elasticsearch.py b/api/chalicelib/core/log_tools/elasticsearch.py similarity index 100% rename from api/chalicelib/core/log_tool_elasticsearch.py rename to api/chalicelib/core/log_tools/elasticsearch.py diff --git a/api/chalicelib/core/log_tools.py b/api/chalicelib/core/log_tools/log_tools.py similarity index 93% rename from api/chalicelib/core/log_tools.py rename to api/chalicelib/core/log_tools/log_tools.py index 040e9d7ba..78047ddb5 100644 --- a/api/chalicelib/core/log_tools.py +++ b/api/chalicelib/core/log_tools/log_tools.py @@ -1,5 +1,6 @@ from chalicelib.utils import pg_client, helper import json +from chalicelib.core.log_tools.modules import TENANT_CONDITION EXCEPT = ["jira_server", "jira_cloud"] @@ -94,11 +95,11 @@ def get_all_by_tenant(tenant_id, integration): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( - """SELECT integrations.* + f"""SELECT integrations.* FROM public.integrations INNER JOIN public.projects USING(project_id) - WHERE provider = %(provider)s + WHERE provider = %(provider)s AND {TENANT_CONDITION} AND projects.deleted_at ISNULL;""", - {"provider": integration}) + {"tenant_id": tenant_id, "provider": integration}) ) r = cur.fetchall() return helper.list_to_camel_case(r, flatten=True) diff --git a/api/chalicelib/core/log_tools/modules/__init__.py b/api/chalicelib/core/log_tools/modules/__init__.py new file mode 100644 index 000000000..a9100a3ac --- /dev/null +++ b/api/chalicelib/core/log_tools/modules/__init__.py @@ -0,0 +1 @@ +TENANT_CONDITION = "TRUE" diff --git a/api/chalicelib/core/log_tool_newrelic.py b/api/chalicelib/core/log_tools/newrelic.py similarity index 100% rename from api/chalicelib/core/log_tool_newrelic.py rename to api/chalicelib/core/log_tools/newrelic.py diff --git a/api/chalicelib/core/log_tool_rollbar.py b/api/chalicelib/core/log_tools/rollbar.py similarity index 100% rename from api/chalicelib/core/log_tool_rollbar.py rename to api/chalicelib/core/log_tools/rollbar.py diff --git a/api/chalicelib/core/log_tool_sentry.py b/api/chalicelib/core/log_tools/sentry.py similarity index 100% rename from api/chalicelib/core/log_tool_sentry.py rename to api/chalicelib/core/log_tools/sentry.py diff --git a/api/chalicelib/core/log_tool_stackdriver.py b/api/chalicelib/core/log_tools/stackdriver.py similarity index 100% rename from api/chalicelib/core/log_tool_stackdriver.py rename to api/chalicelib/core/log_tools/stackdriver.py diff --git a/api/chalicelib/core/log_tool_sumologic.py b/api/chalicelib/core/log_tools/sumologic.py similarity index 100% rename from api/chalicelib/core/log_tool_sumologic.py rename to api/chalicelib/core/log_tools/sumologic.py diff --git a/api/chalicelib/core/product_anaytics2.py b/api/chalicelib/core/product_anaytics2.py new file mode 100644 index 000000000..9e32e088d --- /dev/null +++ b/api/chalicelib/core/product_anaytics2.py @@ -0,0 +1,14 @@ +from chalicelib.utils.ch_client import ClickHouseClient + + +def search_events(project_id: int, data: dict): + with ClickHouseClient() as ch_client: + r = ch_client.format( + """SELECT * + FROM 
taha.events + WHERE project_id=%(project_id)s + ORDER BY created_at;""", + params={"project_id": project_id}) + x = ch_client.execute(r) + + return x diff --git a/api/chalicelib/core/sessions/__init__.py b/api/chalicelib/core/sessions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/api/chalicelib/core/performance_event.py b/api/chalicelib/core/sessions/performance_event.py similarity index 100% rename from api/chalicelib/core/performance_event.py rename to api/chalicelib/core/sessions/performance_event.py diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions/sessions.py similarity index 99% rename from api/chalicelib/core/sessions.py rename to api/chalicelib/core/sessions/sessions.py index 8e12d9a8d..b975c36c7 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions/sessions.py @@ -2,7 +2,8 @@ import logging from typing import List, Union import schemas -from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite +from chalicelib.core import events, metadata, projects +from chalicelib.core.sessions import sessions_favorite, performance_event from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh diff --git a/api/chalicelib/core/sessions_assignments.py b/api/chalicelib/core/sessions/sessions_assignments.py similarity index 98% rename from api/chalicelib/core/sessions_assignments.py rename to api/chalicelib/core/sessions/sessions_assignments.py index 567200b07..48664bf65 100644 --- a/api/chalicelib/core/sessions_assignments.py +++ b/api/chalicelib/core/sessions/sessions_assignments.py @@ -2,7 +2,7 @@ from decouple import config from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils import pg_client -from chalicelib.core import integrations_manager, integration_base_issue +from chalicelib.core.issue_tracking import integrations_manager, integration_base_issue import json diff --git a/ee/api/chalicelib/core/sessions_exp.py b/api/chalicelib/core/sessions/sessions_ch.py similarity index 98% rename from ee/api/chalicelib/core/sessions_exp.py rename to api/chalicelib/core/sessions/sessions_ch.py index 78124c923..ad3ed8579 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/api/chalicelib/core/sessions/sessions_ch.py @@ -3,11 +3,13 @@ import logging from typing import List, Union import schemas -from chalicelib.core import events, metadata, projects, performance_event, metrics, sessions_favorite, sessions_legacy +from chalicelib.core import events, metadata, projects, metrics, sessions +from chalicelib.core.sessions import sessions_favorite, performance_event from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper from chalicelib.utils import sql_helper as sh logger = logging.getLogger(__name__) + SESSION_PROJECTION_COLS_CH = """\ s.project_id, s.session_id AS session_id, @@ -1690,24 +1692,4 @@ def check_recording_status(project_id: int) -> dict: # TODO: rewrite this function to use ClickHouse def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id', ascending: bool = False) -> dict: - if session_ids is None or len(session_ids) == 0: - return {"total": 0, "sessions": []} - with pg_client.PostgresClient() as cur: - meta_keys = metadata.get(project_id=project_id) - params = {"project_id": project_id, "session_ids": tuple(session_ids)} - order_direction = 'ASC' if ascending else 'DESC' - main_query = cur.mogrify(f"""SELECT 
{sessions_legacy.SESSION_PROJECTION_BASE_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - FROM public.sessions AS s - WHERE project_id=%(project_id)s - AND session_id IN %(session_ids)s - ORDER BY {sort_by} {order_direction};""", params) - - cur.execute(main_query) - rows = cur.fetchall() - if len(meta_keys) > 0: - for s in rows: - s["metadata"] = {} - for m in meta_keys: - s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}') - return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)} + return sessions.search_sessions_by_ids(project_id, session_ids, sort_by, ascending) diff --git a/api/chalicelib/core/sessions_devtool.py b/api/chalicelib/core/sessions/sessions_devtool.py similarity index 100% rename from api/chalicelib/core/sessions_devtool.py rename to api/chalicelib/core/sessions/sessions_devtool.py diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions/sessions_favorite.py similarity index 100% rename from api/chalicelib/core/sessions_favorite.py rename to api/chalicelib/core/sessions/sessions_favorite.py diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions/sessions_metas.py similarity index 98% rename from api/chalicelib/core/sessions_metas.py rename to api/chalicelib/core/sessions/sessions_metas.py index 6cfd0bad5..f0aca0db1 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions/sessions_metas.py @@ -1,5 +1,5 @@ import schemas -from chalicelib.core import autocomplete +from chalicelib.core.autocomplete import autocomplete from chalicelib.utils.event_filter_definition import SupportedFilter SUPPORTED_TYPES = { @@ -42,7 +42,7 @@ SUPPORTED_TYPES = { schemas.FilterType.UTM_SOURCE: SupportedFilter( get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE), query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)), - # IOS + # Mobile schemas.FilterType.USER_OS_MOBILE: SupportedFilter( get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE), query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)), diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions/sessions_mobs.py similarity index 100% rename from api/chalicelib/core/sessions_mobs.py rename to api/chalicelib/core/sessions/sessions_mobs.py diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions/sessions_notes.py similarity index 98% rename from api/chalicelib/core/sessions_notes.py rename to api/chalicelib/core/sessions/sessions_notes.py index 200f6399d..2e42b9c1f 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions/sessions_notes.py @@ -4,8 +4,8 @@ from urllib.parse import urljoin from decouple import config import schemas -from chalicelib.core.collaboration_msteams import MSTeams -from chalicelib.core.collaboration_slack import Slack +from chalicelib.core.collaborations.collaboration_msteams import MSTeams +from chalicelib.core.collaborations.collaboration_slack import Slack from chalicelib.utils import pg_client, helper from chalicelib.utils import sql_helper as sh from chalicelib.utils.TimeUTC import TimeUTC diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions/sessions_replay.py similarity index 98% rename from api/chalicelib/core/sessions_replay.py rename to api/chalicelib/core/sessions/sessions_replay.py index ad8b2a62e..0cdd1dad1 
100644 --- a/api/chalicelib/core/sessions_replay.py +++ b/api/chalicelib/core/sessions/sessions_replay.py @@ -1,6 +1,7 @@ import schemas from chalicelib.core import events, metadata, events_mobile, \ - sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing + issues, assist, canvas, user_testing +from chalicelib.core.sessions import sessions_mobs, sessions_devtool from chalicelib.utils import errors_helper from chalicelib.utils import pg_client, helper diff --git a/api/chalicelib/core/sessions_viewed.py b/api/chalicelib/core/sessions/sessions_viewed.py similarity index 100% rename from api/chalicelib/core/sessions_viewed.py rename to api/chalicelib/core/sessions/sessions_viewed.py diff --git a/api/chalicelib/core/unprocessed_sessions.py b/api/chalicelib/core/sessions/unprocessed_sessions.py similarity index 100% rename from api/chalicelib/core/unprocessed_sessions.py rename to api/chalicelib/core/sessions/unprocessed_sessions.py diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index fd8a3af17..d3ae2a443 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -765,30 +765,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id, - metric_format: schemas.MetricExtendedFormatType): - output = [] - stages = filter_d.events - - if len(stages) == 0: - logger.debug("no stages found") - return output, 0 - - # The result of the multi-stage query - rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) - # Obtain the first part of the output - stages_list = get_stages(stages, rows, metric_format=metric_format) - if len(rows) == 0: - return stages_list, 0 - - # Obtain the second part of the output - total_drop_due_to_issues = get_issues(stages, rows, - first_stage=1, - last_stage=len(filter_d.events), - drop_only=True) - return stages_list, total_drop_due_to_issues - - def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None): output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) stages = filter_d.events diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 07e4a1bf1..3c3f68772 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -457,12 +457,6 @@ def set_password_invitation(user_id, new_password): user = update(tenant_id=-1, user_id=user_id, changes=changes) r = authenticate(user['email'], new_password) - tenant_id = r.pop("tenantId") - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - return { "jwt": r.pop("jwt"), "refreshToken": r.pop("refreshToken"), @@ -470,10 +464,7 @@ def set_password_invitation(user_id, new_password): "spotJwt": r.pop("spotJwt"), "spotRefreshToken": r.pop("spotRefreshToken"), "spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"), - 'data': { - "scopeState": scope.get_scope(-1), - "user": r - } + **r } diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py index afec28054..a3fe7b4be 100644 --- a/api/chalicelib/core/webhook.py +++ b/api/chalicelib/core/webhook.py @@ -129,13 +129,13 @@ def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") 
if data.webhook_id is not None: return update(tenant_id=tenant_id, webhook_id=data.webhook_id, - changes={"endpoint": data.endpoint.unicode_string(), + changes={"endpoint": data.endpoint, "authHeader": data.auth_header, "name": data.name}, replace_none=replace_none) else: return add(tenant_id=tenant_id, - endpoint=data.endpoint.unicode_string(), + endpoint=data.endpoint, auth_header=data.auth_header, name=data.name, replace_none=replace_none) diff --git a/api/chalicelib/utils/__init__.py b/api/chalicelib/utils/__init__.py index df64e4775..54e0b4c65 100644 --- a/api/chalicelib/utils/__init__.py +++ b/api/chalicelib/utils/__init__.py @@ -11,3 +11,9 @@ if smtp.has_smtp(): logger.info("valid SMTP configuration found") else: logger.info("no SMTP configuration found or SMTP validation failed") + +if config("EXP_CH_DRIVER", cast=bool, default=True): + logging.info(">>> Using new CH driver") + from . import ch_client_exp as ch_client +else: + from . import ch_client diff --git a/ee/api/chalicelib/utils/ch_client.py b/api/chalicelib/utils/ch_client.py similarity index 65% rename from ee/api/chalicelib/utils/ch_client.py rename to api/chalicelib/utils/ch_client.py index b7d19b4f9..d1ed07515 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/api/chalicelib/utils/ch_client.py @@ -3,15 +3,15 @@ import logging import clickhouse_driver from decouple import config -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) +logger = logging.getLogger(__name__) settings = {} if config('ch_timeout', cast=int, default=-1) > 0: - logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s") + logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s") settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)} if config('ch_receive_timeout', cast=int, default=-1) > 0: - logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s") + logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s") settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)} @@ -35,20 +35,20 @@ class ClickHouseClient: def __enter__(self): return self - def execute(self, query, params=None, **args): + def execute(self, query, parameters=None, **args): try: - results = self.__client.execute(query=query, params=params, with_column_types=True, **args) + results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args) keys = tuple(x for x, y in results[1]) return [dict(zip(keys, i)) for i in results[0]] except Exception as err: - logging.error("--------- CH EXCEPTION -----------") - logging.error(err) - logging.error("--------- CH QUERY EXCEPTION -----------") - logging.error(self.format(query=query, params=params) - .replace('\n', '\\n') - .replace(' ', ' ') - .replace(' ', ' ')) - logging.error("--------------------") + logger.error("--------- CH EXCEPTION -----------") + logger.error(err) + logger.error("--------- CH QUERY EXCEPTION -----------") + logger.error(self.format(query=query, parameters=parameters) + .replace('\n', '\\n') + .replace(' ', ' ') + .replace(' ', ' ')) + logger.error("--------------------") raise err def insert(self, query, params=None, **args): @@ -57,10 +57,18 @@ class ClickHouseClient: def client(self): return self.__client - def format(self, query, params): - if params is None: + def format(self, query, parameters): + if parameters is None: return query - return self.__client.substitute_params(query, params, self.__client.connection.context) + return 
self.__client.substitute_params(query, parameters, self.__client.connection.context) def __exit__(self, *args): pass + + +async def init(): + logger.info(f">CH_POOL:not defined") + + +async def terminate(): + pass diff --git a/api/chalicelib/utils/ch_client_exp.py b/api/chalicelib/utils/ch_client_exp.py new file mode 100644 index 000000000..8bdb4c20b --- /dev/null +++ b/api/chalicelib/utils/ch_client_exp.py @@ -0,0 +1,176 @@ +import logging +import threading +import time +from functools import wraps +from queue import Queue, Empty + +import clickhouse_connect +from clickhouse_connect.driver.query import QueryContext +from clickhouse_connect.driver.exceptions import DatabaseError +from decouple import config + +logger = logging.getLogger(__name__) + +_CH_CONFIG = {"host": config("ch_host"), + "user": config("ch_user", default="default"), + "password": config("ch_password", default=""), + "port": config("ch_port_http", cast=int), + "client_name": config("APP_NAME", default="PY")} +CH_CONFIG = dict(_CH_CONFIG) + +settings = {} +if config('ch_timeout', cast=int, default=-1) > 0: + logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s") + settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)} + +if config('ch_receive_timeout', cast=int, default=-1) > 0: + logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s") + settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)} + +extra_args = {} +if config("CH_COMPRESSION", cast=bool, default=True): + extra_args["compression"] = "lz4" + + +def transform_result(original_function): + @wraps(original_function) + def wrapper(*args, **kwargs): + logger.info("Executing query on CH") + result = original_function(*args, **kwargs) + if isinstance(result, clickhouse_connect.driver.query.QueryResult): + column_names = result.column_names + result = result.result_rows + result = [dict(zip(column_names, row)) for row in result] + + return result + + return wrapper + + +class ClickHouseConnectionPool: + def __init__(self, min_size, max_size): + self.min_size = min_size + self.max_size = max_size + self.pool = Queue() + self.lock = threading.Lock() + self.total_connections = 0 + + # Initialize the pool with min_size connections + for _ in range(self.min_size): + client = clickhouse_connect.get_client(**CH_CONFIG, + database=config("ch_database", default="default"), + settings=settings, + **extra_args) + self.pool.put(client) + self.total_connections += 1 + + def get_connection(self): + try: + # Try to get a connection without blocking + client = self.pool.get_nowait() + return client + except Empty: + with self.lock: + if self.total_connections < self.max_size: + client = clickhouse_connect.get_client(**CH_CONFIG, + database=config("ch_database", default="default"), + settings=settings, + **extra_args) + self.total_connections += 1 + return client + # If max_size reached, wait until a connection is available + client = self.pool.get() + return client + + def release_connection(self, client): + self.pool.put(client) + + def close_all(self): + with self.lock: + while not self.pool.empty(): + client = self.pool.get() + client.close() + self.total_connections = 0 + + +CH_pool: ClickHouseConnectionPool = None + +RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50) +RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2) +RETRY = 0 + + +def make_pool(): + if not config('CH_POOL', cast=bool, default=True): + return + global CH_pool + global RETRY + if CH_pool is not None: + try: + 
CH_pool.close_all() + except Exception as error: + logger.error("Error while closing all connexions to CH", error) + try: + CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4), + max_size=config("CH_MAXCONN", cast=int, default=8)) + if CH_pool is not None: + logger.info("Connection pool created successfully for CH") + except ConnectionError as error: + logger.error("Error while connecting to CH", error) + if RETRY < RETRY_MAX: + RETRY += 1 + logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}") + time.sleep(RETRY_INTERVAL) + make_pool() + else: + raise error + + +class ClickHouseClient: + __client = None + + def __init__(self, database=None): + if self.__client is None: + if database is None and config('CH_POOL', cast=bool, default=True): + self.__client = CH_pool.get_connection() + else: + self.__client = clickhouse_connect.get_client(**CH_CONFIG, + database=database if database else config("ch_database", + default="default"), + settings=settings, + **extra_args) + self.__client.execute = transform_result(self.__client.query) + self.__client.format = self.format + + def __enter__(self): + return self.__client + + def format(self, query, *, parameters=None): + if parameters is None: + return query + return query % { + key: f"'{value}'" if isinstance(value, str) else value + for key, value in parameters.items() + } + + def __exit__(self, *args): + if config('CH_POOL', cast=bool, default=True): + CH_pool.release_connection(self.__client) + else: + self.__client.close() + + +async def init(): + logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}") + if config('CH_POOL', cast=bool, default=True): + make_pool() + + +async def terminate(): + global CH_pool + if CH_pool is not None: + try: + CH_pool.close_all() + logger.info("Closed all connexions to CH") + except Exception as error: + logger.error("Error while closing all connexions to CH", error) diff --git a/api/chalicelib/utils/exp_ch_helper.py b/api/chalicelib/utils/exp_ch_helper.py new file mode 100644 index 000000000..cd8fb052f --- /dev/null +++ b/api/chalicelib/utils/exp_ch_helper.py @@ -0,0 +1,57 @@ +from typing import Union + +import schemas +import logging + +logger = logging.getLogger(__name__) + + +def get_main_events_table(timestamp=0, platform="web"): + if platform == "web": + return "experimental.events" + else: + return "experimental.ios_events" + + +def get_main_sessions_table(timestamp=0): + return "experimental.sessions" + + + +def get_main_js_errors_sessions_table(timestamp=0): + return get_main_events_table(timestamp=timestamp) + + +def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType], platform="web"): + defs = { + schemas.EventType.CLICK: "CLICK", + schemas.EventType.INPUT: "INPUT", + schemas.EventType.LOCATION: "LOCATION", + schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: "LOCATION", + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: "LOCATION", + schemas.PerformanceEventType.LOCATION_TTFB: "LOCATION", + schemas.EventType.CUSTOM: "CUSTOM", + schemas.EventType.REQUEST: "REQUEST", + schemas.EventType.REQUEST_DETAILS: "REQUEST", + schemas.PerformanceEventType.FETCH_FAILED: "REQUEST", + schemas.GraphqlFilterType.GRAPHQL_NAME: "GRAPHQL", + schemas.EventType.STATE_ACTION: "STATEACTION", + schemas.EventType.ERROR: "ERROR", + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE', + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE', + schemas.FetchFilterType.FETCH_URL: 'REQUEST' + } + 
defs_mobile = { + schemas.EventType.CLICK_MOBILE: "TAP", + schemas.EventType.INPUT_MOBILE: "INPUT", + schemas.EventType.CUSTOM_MOBILE: "CUSTOM", + schemas.EventType.REQUEST_MOBILE: "REQUEST", + schemas.EventType.ERROR_MOBILE: "CRASH", + schemas.EventType.VIEW_MOBILE: "VIEW", + schemas.EventType.SWIPE_MOBILE: "SWIPE" + } + if platform != "web" and event_type in defs_mobile: + return defs_mobile.get(event_type) + if event_type not in defs: + raise Exception(f"unsupported EventType:{event_type}") + return defs.get(event_type) diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index b97ab005e..29ea84873 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -166,7 +166,7 @@ class PostgresClient: async def init(): - logger.info(f">PG_POOL:{config('PG_POOL', default=None)}") + logger.info(f">use PG_POOL:{config('PG_POOL', default=True)}") if config('PG_POOL', cast=bool, default=True): make_pool() diff --git a/api/env.default b/api/env.default index 8e80f2ea2..e54f9dfb4 100644 --- a/api/env.default +++ b/api/env.default @@ -8,6 +8,12 @@ assistList=/sockets-list CANVAS_PATTERN=%(sessionId)s/%(recordingId)s.tar.zst captcha_key= captcha_server= +CH_COMPRESSION=true +ch_host= +ch_port=9000 +ch_port_http=8123 +ch_receive_timeout=10 +ch_timeout=30 change_password_link=/reset-password?invitation=%s&&pass=%s DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools @@ -63,4 +69,7 @@ SITE_URL= sourcemaps_bucket=sourcemaps sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/sourcemaps/{}/sourcemaps STAGE=default-foss -TZ=UTC \ No newline at end of file +TZ=UTC +EXP_CH_DRIVER=true +EXP_AUTOCOMPLETE=true +EXP_ALERTS=true \ No newline at end of file diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index 84004bd53..ee8bbc950 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -1,18 +1,19 @@ -# Keep this version to not have conflicts between requests and boto3 -urllib3==1.26.16 +urllib3==2.2.3 requests==2.32.3 -boto3==1.35.60 -pyjwt==2.9.0 +boto3==1.35.76 +pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 +clickhouse-driver[lz4]==0.2.9 +clickhouse-connect==0.8.9 elasticsearch==8.16.0 jira==3.8.0 cachetools==5.5.0 -fastapi==0.115.5 -uvicorn[standard]==0.32.0 +fastapi==0.115.6 +uvicorn[standard]==0.32.1 python-decouple==3.8 -pydantic[email]==2.9.2 -apscheduler==3.10.4 +pydantic[email]==2.10.3 +apscheduler==3.11.0 diff --git a/api/requirements.txt b/api/requirements.txt index df6c2c527..d643061f1 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,20 +1,21 @@ -# Keep this version to not have conflicts between requests and boto3 -urllib3==1.26.16 +urllib3==2.2.3 requests==2.32.3 -boto3==1.35.60 -pyjwt==2.9.0 +boto3==1.35.76 +pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 +clickhouse-driver[lz4]==0.2.9 +clickhouse-connect==0.8.9 elasticsearch==8.16.0 jira==3.8.0 cachetools==5.5.0 -fastapi==0.115.5 -uvicorn[standard]==0.32.0 +fastapi==0.115.6 +uvicorn[standard]==0.32.1 python-decouple==3.8 -pydantic[email]==2.9.2 -apscheduler==3.10.4 +pydantic[email]==2.10.3 +apscheduler==3.11.0 -redis==5.2.0 +redis==5.2.1 diff --git a/api/routers/core.py b/api/routers/core.py index e965f54f1..1efbec2ff 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -4,13 +4,18 @@ from decouple import config from fastapi import Depends, Body, BackgroundTasks import schemas -from chalicelib.core import 
log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, alerts, issues, \ - integrations_manager, metadata, log_tool_elasticsearch, log_tool_datadog, log_tool_stackdriver, reset_password, \ - log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, log_tool_newrelic, announcements, \ - log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, assist, mobile, tenants, boarding, \ - notifications, webhook, users, custom_metrics, saved_search, integrations_global, tags, autocomplete -from chalicelib.core.collaboration_msteams import MSTeams -from chalicelib.core.collaboration_slack import Slack +from chalicelib.core import sourcemaps, events, projects, alerts, issues, \ + metadata, reset_password, \ + log_tools, sessions, announcements, \ + weekly_report, assist, mobile, tenants, boarding, \ + notifications, webhook, users, custom_metrics, saved_search, tags, autocomplete +from chalicelib.core.issue_tracking import integration_github, integrations_global, integrations_manager, \ + integration_jira_cloud +from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \ + sentry, bugsnag, cloudwatch, sumologic, rollbar +from chalicelib.core.sessions import sessions_assignments +from chalicelib.core.collaborations.collaboration_msteams import MSTeams +from chalicelib.core.collaborations.collaboration_slack import Slack from or_dependencies import OR_context, OR_role from routers.base import get_routers @@ -91,217 +96,217 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source: @app.get('/integrations/sentry', tags=["integrations"]) def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.get_all(tenant_id=context.tenant_id)} + return {"data": sentry.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/sentry', tags=["integrations"]) def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.get(project_id=projectId)} + return {"data": sentry.get(project_id=projectId)} @app.post('/{projectId}/integrations/sentry', tags=["integrations"]) def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sentry', tags=["integrations"]) def delete_sentry(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": sentry.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/{projectId}/integrations/sentry/events/{eventId}', tags=["integrations"]) def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)} + return {"data": sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)} @app.get('/integrations/datadog', tags=["integrations"]) def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_datadog.get_all(tenant_id=context.tenant_id)} + return {"data": 
datadog.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/datadog', tags=["integrations"]) def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_datadog.get(project_id=projectId)} + return {"data": datadog.get(project_id=projectId)} @app.post('/{projectId}/integrations/datadog', tags=["integrations"]) def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/datadog', tags=["integrations"]) def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": datadog.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/stackdriver', tags=["integrations"]) def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_stackdriver.get_all(tenant_id=context.tenant_id)} + return {"data": stackdriver.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/stackdriver', tags=["integrations"]) def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_stackdriver.get(project_id=projectId)} + return {"data": stackdriver.get(project_id=projectId)} @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"]) def delete_stackdriver(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/newrelic', tags=["integrations"]) def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_newrelic.get_all(tenant_id=context.tenant_id)} + return {"data": newrelic.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/newrelic', tags=["integrations"]) def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_newrelic.get(project_id=projectId)} + return {"data": newrelic.get(project_id=projectId)} @app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/newrelic', tags=["integrations"]) def 
delete_newrelic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/rollbar', tags=["integrations"]) def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_rollbar.get_all(tenant_id=context.tenant_id)} + return {"data": rollbar.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/rollbar', tags=["integrations"]) def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_rollbar.get(project_id=projectId)} + return {"data": rollbar.get(project_id=projectId)} @app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/rollbar', tags=["integrations"]) def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.post('/integrations/bugsnag/list_projects', tags=["integrations"]) def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorization_token)} + return {"data": bugsnag.list_projects(auth_token=data.authorization_token)} @app.get('/integrations/bugsnag', tags=["integrations"]) def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.get_all(tenant_id=context.tenant_id)} + return {"data": bugsnag.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/bugsnag', tags=["integrations"]) def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.get(project_id=projectId)} + return {"data": bugsnag.get(project_id=projectId)} @app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"]) def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.post('/integrations/cloudwatch/list_groups', tags=["integrations"]) def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": 
log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, - aws_secret_access_key=data.awsSecretAccessKey, - region=data.region)} + return {"data": cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, + aws_secret_access_key=data.awsSecretAccessKey, + region=data.region)} @app.get('/integrations/cloudwatch', tags=["integrations"]) def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_cloudwatch.get_all(tenant_id=context.tenant_id)} + return {"data": cloudwatch.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/cloudwatch', tags=["integrations"]) def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_cloudwatch.get(project_id=projectId)} + return {"data": cloudwatch.get(project_id=projectId)} @app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"]) def delete_cloudwatch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/elasticsearch', tags=["integrations"]) def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_elasticsearch.get_all(tenant_id=context.tenant_id)} + return {"data": elasticsearch.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/elasticsearch', tags=["integrations"]) def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_elasticsearch.get(project_id=projectId)} + return {"data": elasticsearch.get(project_id=projectId)} @app.post('/integrations/elasticsearch/test', tags=["integrations"]) def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, data=data)} + return {"data": elasticsearch.ping(tenant_id=context.tenant_id, data=data)} @app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { - "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + "data": elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"]) def delete_elasticsearch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/sumologic', tags=["integrations"]) def 
get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sumologic.get_all(tenant_id=context.tenant_id)} + return {"data": sumologic.get_all(tenant_id=context.tenant_id)} @app.get('/{projectId}/integrations/sumologic', tags=["integrations"]) def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sumologic.get(project_id=projectId)} + return {"data": sumologic.get(project_id=projectId)} @app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} + return {"data": sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sumologic', tags=["integrations"]) def delete_sumologic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)} + return {"data": sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)} @app.get('/integrations/issues', tags=["integrations"]) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 4f349b5e2..026aea6f5 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -8,13 +8,13 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re import schemas from chalicelib.core import scope -from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ - sessions_favorite, assist, sessions_notes, sessions_replay, signup, feature_flags -from chalicelib.core import sessions_viewed +from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, heatmaps, \ + assist, signup, feature_flags +from chalicelib.core.sessions import sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \ + sessions_assignments, unprocessed_sessions from chalicelib.core import tenants, users, projects, license -from chalicelib.core import unprocessed_sessions from chalicelib.core import webhook -from chalicelib.core.collaboration_slack import Slack +from chalicelib.core.collaborations.collaboration_slack import Slack from chalicelib.utils import captcha, smtp from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC diff --git a/api/routers/subs/product_anaytics.py b/api/routers/subs/product_anaytics.py new file mode 100644 index 000000000..5f5de83c1 --- /dev/null +++ b/api/routers/subs/product_anaytics.py @@ -0,0 +1,17 @@ +from typing import Union + +import schemas +from chalicelib.core import product_anaytics2 +from fastapi import Body, Depends +from or_dependencies import OR_context +from routers.base import get_routers + + +public_app, app, app_apikey = get_routers() + + +@app.post('/{projectId}/events/search', tags=["dashboard"]) +def search_events(projectId: int, + # data: schemas.CreateDashboardSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return product_anaytics2.search_events(project_id=projectId, data={}) diff --git a/api/schemas/schemas.py b/api/schemas/schemas.py index 5aa0b4e5f..44ffc9335 100644 --- a/api/schemas/schemas.py +++ b/api/schemas/schemas.py @@ -11,59 +11,6 @@ from .transformers_validators import transform_email, 
remove_whitespace, remove_ force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric -def transform_old_filter_type(cls, values): - if values.get("type") is None: - return values - values["type"] = { - # filters - "USEROS": FilterType.USER_OS.value, - "USERBROWSER": FilterType.USER_BROWSER.value, - "USERDEVICE": FilterType.USER_DEVICE.value, - "USERCOUNTRY": FilterType.USER_COUNTRY.value, - "USERID": FilterType.USER_ID.value, - "USERANONYMOUSID": FilterType.USER_ANONYMOUS_ID.value, - "REFERRER": FilterType.REFERRER.value, - "REVID": FilterType.REV_ID.value, - "USEROS_IOS": FilterType.USER_OS_MOBILE.value, - "USERDEVICE_IOS": FilterType.USER_DEVICE_MOBILE.value, - "USERCOUNTRY_IOS": FilterType.USER_COUNTRY_MOBILE.value, - "USERID_IOS": FilterType.USER_ID_MOBILE.value, - "USERANONYMOUSID_IOS": FilterType.USER_ANONYMOUS_ID_MOBILE.value, - "REVID_IOS": FilterType.REV_ID_MOBILE.value, - "DURATION": FilterType.DURATION.value, - "PLATFORM": FilterType.PLATFORM.value, - "METADATA": FilterType.METADATA.value, - "ISSUE": FilterType.ISSUE.value, - "EVENTS_COUNT": FilterType.EVENTS_COUNT.value, - "UTM_SOURCE": FilterType.UTM_SOURCE.value, - "UTM_MEDIUM": FilterType.UTM_MEDIUM.value, - "UTM_CAMPAIGN": FilterType.UTM_CAMPAIGN.value, - # events: - "CLICK": EventType.CLICK.value, - "INPUT": EventType.INPUT.value, - "LOCATION": EventType.LOCATION.value, - "CUSTOM": EventType.CUSTOM.value, - "REQUEST": EventType.REQUEST.value, - "FETCH": EventType.REQUEST_DETAILS.value, - "GRAPHQL": EventType.GRAPHQL.value, - "STATEACTION": EventType.STATE_ACTION.value, - "ERROR": EventType.ERROR.value, - "CLICK_IOS": EventType.CLICK_MOBILE.value, - "INPUT_IOS": EventType.INPUT_MOBILE.value, - "VIEW_IOS": EventType.VIEW_MOBILE.value, - "CUSTOM_IOS": EventType.CUSTOM_MOBILE.value, - "REQUEST_IOS": EventType.REQUEST_MOBILE.value, - "ERROR_IOS": EventType.ERROR_MOBILE.value, - "DOM_COMPLETE": PerformanceEventType.LOCATION_DOM_COMPLETE.value, - "LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME.value, - "TTFB": PerformanceEventType.LOCATION_TTFB.value, - "AVG_CPU_LOAD": PerformanceEventType.LOCATION_AVG_CPU_LOAD.value, - "AVG_MEMORY_USAGE": PerformanceEventType.LOCATION_AVG_MEMORY_USAGE.value, - "FETCH_FAILED": PerformanceEventType.FETCH_FAILED.value, - }.get(values["type"], values["type"]) - return values - - class _GRecaptcha(BaseModel): g_recaptcha_response: Optional[str] = Field(default=None, alias='g-recaptcha-response') @@ -211,7 +158,8 @@ class IssueTrackingJiraSchema(IssueTrackingIntegration): class WebhookSchema(BaseModel): webhook_id: Optional[int] = Field(default=None) - endpoint: AnyHttpUrl = Field(...) 
+ processed_endpoint: AnyHttpUrl = Field(..., alias="endpoint") + endpoint: Optional[str] = Field(default=None, doc_hidden=True) auth_header: Optional[str] = Field(default=None) name: str = Field(default="", max_length=100, pattern=NAME_PATTERN) @@ -601,7 +549,6 @@ class SessionSearchEventSchema2(BaseModel): _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) _single_to_list_values = field_validator('value', mode='before')(single_to_list) - _transform = model_validator(mode='before')(transform_old_filter_type) @model_validator(mode="after") def event_validator(self): @@ -638,7 +585,6 @@ class SessionSearchFilterSchema(BaseModel): source: Optional[Union[ErrorSource, str]] = Field(default=None) _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) - _transform = model_validator(mode='before')(transform_old_filter_type) _single_to_list_values = field_validator('value', mode='before')(single_to_list) @model_validator(mode="before") @@ -754,6 +700,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema): for f in values.get("filters", []): vals = [] for v in f.get("value", []): + if f.get("type", "") == FilterType.DURATION.value and v is None: + v = 0 if v is not None and (f.get("type", "") != FilterType.DURATION.value or str(v).isnumeric()): vals.append(v) @@ -895,6 +843,11 @@ class CardSeriesSchema(BaseModel): class MetricTimeseriesViewType(str, Enum): LINE_CHART = "lineChart" AREA_CHART = "areaChart" + BAR_CHART = "barChart" + PIE_CHART = "pieChart" + PROGRESS_CHART = "progressChart" + TABLE_CHART = "table" + METRIC_CHART = "metric" class MetricTableViewType(str, Enum): @@ -918,7 +871,6 @@ class MetricType(str, Enum): RETENTION = "retention" STICKINESS = "stickiness" HEAT_MAP = "heatMap" - INSIGHTS = "insights" class MetricOfErrors(str, Enum): @@ -1194,31 +1146,6 @@ class CardHeatMap(__CardSchema): return self -class MetricOfInsights(str, Enum): - ISSUE_CATEGORIES = "issueCategories" - - -class CardInsights(__CardSchema): - metric_type: Literal[MetricType.INSIGHTS] - metric_of: MetricOfInsights = Field(default=MetricOfInsights.ISSUE_CATEGORIES) - view_type: MetricOtherViewType = Field(...) - - @model_validator(mode="before") - @classmethod - def __enforce_default(cls, values): - values["view_type"] = MetricOtherViewType.LIST_CHART - return values - - @model_validator(mode="after") - def __transform(self): - self.metric_of = MetricOfInsights(self.metric_of) - return self - - @model_validator(mode="after") - def restrictions(self): - raise ValueError(f"metricType:{MetricType.INSIGHTS} not supported yet.") - - class CardPathAnalysisSeriesSchema(CardSeriesSchema): name: Optional[str] = Field(default=None) filter: PathAnalysisSchema = Field(...) 
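Note on the WebhookSchema hunk above: the validated URL now lives in processed_endpoint, populated from the incoming "endpoint" key through a pydantic alias, while endpoint is kept as a plain optional string so existing payloads and serializers keep working. A minimal, self-contained sketch of the same pattern (the class name and sample URL below are invented for illustration and are not part of this patch):

    from typing import Optional
    from pydantic import AnyHttpUrl, BaseModel, Field

    class WebhookExample(BaseModel):
        # validated copy of the incoming "endpoint" value
        processed_endpoint: AnyHttpUrl = Field(..., alias="endpoint")
        # raw value kept for backward-compatible output
        endpoint: Optional[str] = Field(default=None)

    w = WebhookExample.model_validate({"endpoint": "https://hooks.example.com/notify"})
    print(w.processed_endpoint)  # parsed and validated URL
    print(w.endpoint)            # original string, unchanged

Both fields read the same "endpoint" key during validation (the alias for one, the field name for the other), so API clients do not need to change the request body they already send.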
@@ -1295,7 +1222,7 @@ __cards_union_base = Union[ CardErrors, CardWebVital, CardHeatMap, CardPathAnalysis] -CardSchema = ORUnion(Union[__cards_union_base, CardInsights], discriminator='metric_type') +CardSchema = ORUnion(__cards_union_base, discriminator='metric_type') class UpdateCardStatusSchema(BaseModel): @@ -1379,8 +1306,6 @@ class LiveSessionSearchFilterSchema(BaseModel): operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] \ = Field(default=SearchEventOperator.CONTAINS) - _transform = model_validator(mode='before')(transform_old_filter_type) - @model_validator(mode="after") def __validator(self): if self.type is not None and self.type == LiveFilterType.METADATA: diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index 5d75b02d7..a3eac941c 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -6,6 +6,7 @@ import ( config "openreplay/backend/internal/config/db" "openreplay/backend/internal/db" "openreplay/backend/internal/db/datasaver" + "openreplay/backend/pkg/db/clickhouse" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/db/postgres/pool" "openreplay/backend/pkg/db/redis" @@ -33,9 +34,15 @@ func main() { } defer pgConn.Close() - // Init events module - pg := postgres.NewConn(log, pgConn) - defer pg.Close() + chConn := clickhouse.NewConnector(cfg.Clickhouse) + if err := chConn.Prepare(); err != nil { + log.Fatal(ctx, "can't prepare clickhouse: %s", err) + } + defer chConn.Stop() + + // Init db proxy module (postgres + clickhouse + batches) + dbProxy := postgres.NewConn(log, pgConn, chConn) + defer dbProxy.Close() // Init redis connection redisClient, err := redis.New(&cfg.Redis) @@ -49,7 +56,7 @@ func main() { tagsManager := tags.New(log, pgConn) // Init data saver - saver := datasaver.New(log, cfg, pg, sessManager, tagsManager) + saver := datasaver.New(log, cfg, dbProxy, chConn, sessManager, tagsManager) // Message filter msgFilter := []int{ diff --git a/backend/internal/config/common/config.go b/backend/internal/config/common/config.go index dd21d2ae0..a2db40c48 100644 --- a/backend/internal/config/common/config.go +++ b/backend/internal/config/common/config.go @@ -57,10 +57,18 @@ type Redshift struct { // Clickhouse config type Clickhouse struct { - URL string `env:"CLICKHOUSE_STRING"` - Database string `env:"CLICKHOUSE_DATABASE,default=default"` - UserName string `env:"CLICKHOUSE_USERNAME,default=default"` - Password string `env:"CLICKHOUSE_PASSWORD,default="` + URL string `env:"CLICKHOUSE_STRING"` + Database string `env:"CLICKHOUSE_DATABASE,default=default"` + UserName string `env:"CLICKHOUSE_USERNAME,default=default"` + Password string `env:"CLICKHOUSE_PASSWORD,default="` + LegacyUserName string `env:"CH_USERNAME,default=default"` + LegacyPassword string `env:"CH_PASSWORD,default="` +} + +func (cfg *Clickhouse) GetTrimmedURL() string { + chUrl := strings.TrimPrefix(cfg.URL, "tcp://") + chUrl = strings.TrimSuffix(chUrl, "/default") + return chUrl } // ElasticSearch config diff --git a/backend/internal/config/db/config.go b/backend/internal/config/db/config.go index 48d49dc62..e6f45e18a 100644 --- a/backend/internal/config/db/config.go +++ b/backend/internal/config/db/config.go @@ -11,6 +11,7 @@ import ( type Config struct { common.Config common.Postgres + common.Clickhouse redis.Redis ProjectExpiration time.Duration `env:"PROJECT_EXPIRATION,default=10m"` LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"` diff --git a/backend/internal/db/datasaver/fts.go b/backend/internal/db/datasaver/fts.go new file mode 
100644 index 000000000..64ca17bc4 --- /dev/null +++ b/backend/internal/db/datasaver/fts.go @@ -0,0 +1,9 @@ +package datasaver + +import ( + "openreplay/backend/pkg/messages" +) + +func (s *saverImpl) init() {} + +func (s *saverImpl) sendToFTS(msg messages.Message, projID uint32) {} diff --git a/backend/internal/db/datasaver/methods.go b/backend/internal/db/datasaver/methods.go deleted file mode 100644 index 07a8b6ba2..000000000 --- a/backend/internal/db/datasaver/methods.go +++ /dev/null @@ -1,17 +0,0 @@ -package datasaver - -import ( - . "openreplay/backend/pkg/messages" -) - -func (s *saverImpl) init() { - // noop -} - -func (s *saverImpl) handleExtraMessage(msg Message) error { - switch m := msg.(type) { - case *PerformanceTrackAggr: - return s.pg.InsertWebStatsPerformance(m) - } - return nil -} diff --git a/backend/internal/db/datasaver/mobile.go b/backend/internal/db/datasaver/mobile.go new file mode 100644 index 000000000..3c9e01a0a --- /dev/null +++ b/backend/internal/db/datasaver/mobile.go @@ -0,0 +1,72 @@ +package datasaver + +import ( + "context" + + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/sessions" +) + +func (s *saverImpl) handleMobileMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error { + switch m := msg.(type) { + case *messages.MobileSessionEnd: + return s.ch.InsertMobileSession(session) + case *messages.MobileUserID: + if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil { + return err + } + s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID) + return nil + case *messages.MobileUserAnonymousID: + if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil { + return err + } + s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID) + return nil + case *messages.MobileMetadata: + return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value) + case *messages.MobileEvent: + if err := s.pg.InsertMobileEvent(session, m); err != nil { + return err + } + return s.ch.InsertMobileCustom(session, m) + case *messages.MobileClickEvent: + if err := s.pg.InsertMobileClickEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil { + return err + } + return s.ch.InsertMobileClick(session, m) + case *messages.MobileSwipeEvent: + if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil { + return err + } + return s.ch.InsertMobileSwipe(session, m) + case *messages.MobileInputEvent: + if err := s.pg.InsertMobileInputEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil { + return err + } + return s.ch.InsertMobileInput(session, m) + case *messages.MobileNetworkCall: + if err := s.pg.InsertMobileNetworkCall(session, m); err != nil { + return err + } + return s.ch.InsertMobileRequest(session, m, session.SaveRequestPayload) + case *messages.MobileCrash: + if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil { + return err + } + if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil { + return err + } + return s.ch.InsertMobileCrash(session, m) + } + return nil +} diff --git a/backend/internal/db/datasaver/saver.go b/backend/internal/db/datasaver/saver.go index d3d217e4b..476a81e9b 100644 --- a/backend/internal/db/datasaver/saver.go 
+++ b/backend/internal/db/datasaver/saver.go @@ -30,11 +30,18 @@ type saverImpl struct { tags tags.Tags } -func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions.Sessions, tags tags.Tags) Saver { +func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Connector, session sessions.Sessions, tags tags.Tags) Saver { + switch { + case pg == nil: + log.Fatal(context.Background(), "pg pool is empty") + case ch == nil: + log.Fatal(context.Background(), "ch pool is empty") + } s := &saverImpl{ log: log, cfg: cfg, pg: pg, + ch: ch, sessions: session, tags: tags, } @@ -43,21 +50,34 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions. } func (s *saverImpl) Handle(msg Message) { - sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID()) if msg.TypeID() == MsgCustomEvent { defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent))) } + + var ( + sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID()) + session *sessions.Session + err error + ) + if msg.TypeID() == MsgSessionEnd || msg.TypeID() == MsgMobileSessionEnd { + session, err = s.sessions.GetUpdated(msg.SessionID(), true) + } else { + session, err = s.sessions.Get(msg.SessionID()) + } + if err != nil || session == nil { + s.log.Error(sessCtx, "error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) + return + } + if IsMobileType(msg.TypeID()) { - // Handle Mobile messages - if err := s.handleMobileMessage(msg); err != nil { + if err := s.handleMobileMessage(sessCtx, session, msg); err != nil { if !postgres.IsPkeyViolation(err) { s.log.Error(sessCtx, "mobile message insertion error, msg: %+v, err: %s", msg, err) } return } } else { - // Handle Web messages - if err := s.handleMessage(msg); err != nil { + if err := s.handleWebMessage(sessCtx, session, msg); err != nil { if !postgres.IsPkeyViolation(err) { s.log.Error(sessCtx, "web message insertion error, msg: %+v, err: %s", msg, err) } @@ -65,180 +85,22 @@ func (s *saverImpl) Handle(msg Message) { } } - if err := s.handleExtraMessage(msg); err != nil { - s.log.Error(sessCtx, "extra message insertion error, msg: %+v, err: %s", msg, err) - } + s.sendToFTS(msg, session.ProjectID) return } -func (s *saverImpl) handleMobileMessage(msg Message) error { - session, err := s.sessions.Get(msg.SessionID()) - if err != nil { - return err - } - switch m := msg.(type) { - case *MobileUserID: - if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil { - return err - } - s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID) - return nil - case *MobileUserAnonymousID: - if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil { - return err - } - s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID) - return nil - case *MobileMetadata: - return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value) - case *MobileEvent: - return s.pg.InsertMobileEvent(session, m) - case *MobileClickEvent: - if err := s.pg.InsertMobileClickEvent(session, m); err != nil { - return err - } - return s.sessions.UpdateEventsStats(session.SessionID, 1, 0) - case *MobileSwipeEvent: - if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil { - return err - } - return s.sessions.UpdateEventsStats(session.SessionID, 1, 0) - case *MobileInputEvent: - if err := s.pg.InsertMobileInputEvent(session, m); err != nil { - return err - } - return 
s.sessions.UpdateEventsStats(session.SessionID, 1, 0) - case *MobileNetworkCall: - return s.pg.InsertMobileNetworkCall(session, m) - case *MobileCrash: - if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil { - return err - } - return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000) - } - return nil -} - -func (s *saverImpl) handleMessage(msg Message) error { - session, err := s.sessions.Get(msg.SessionID()) - if err != nil { - return err - } - sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID()) - switch m := msg.(type) { - case *SessionStart: - return s.pg.HandleStartEvent(m) - case *SessionEnd: - return s.pg.HandleEndEvent(m.SessionID()) - case *Metadata: - return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value) - case *IssueEvent: - if m.Type == "dead_click" || m.Type == "click_rage" { - if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) { - return nil - } - } - err = s.pg.InsertIssueEvent(session, m) - if err != nil { - return err - } - return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type)) - case *CustomIssue: - ie := &IssueEvent{ - Type: "custom", - Timestamp: m.Timestamp, - MessageID: m.Index, - ContextString: m.Name, - Payload: m.Payload, - } - ie.SetMeta(m.Meta()) - if err = s.pg.InsertIssueEvent(session, ie); err != nil { - return err - } - return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type)) - case *UserID: - if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil { - return err - } - s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID) - return nil - case *UserAnonymousID: - if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil { - return err - } - s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID) - return nil - case *CustomEvent: - return s.pg.InsertWebCustomEvent(session, m) - case *MouseClick: - if err = s.pg.InsertWebClickEvent(session, m); err != nil { - return err - } - return s.sessions.UpdateEventsStats(session.SessionID, 1, 0) - case *PageEvent: - if err = s.pg.InsertWebPageEvent(session, m); err != nil { - return err - } - s.sessions.UpdateReferrer(session.SessionID, m.Referrer) - s.sessions.UpdateUTM(session.SessionID, m.URL) - return s.sessions.UpdateEventsStats(session.SessionID, 1, 1) - case *NetworkRequest: - return s.pg.InsertWebNetworkRequest(session, m) - case *GraphQL: - return s.pg.InsertWebGraphQL(session, m) - case *JSException: - wrapper, err := types.WrapJSException(m) - if err != nil { - s.log.Warn(sessCtx, "error on wrapping JSException: %v", err) - } - if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil { - return err - } - return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000) - case *IntegrationEvent: - return s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)) - case *InputChange: - if err = s.pg.InsertInputChangeEvent(session, m); err != nil { - return err - } - return s.sessions.UpdateEventsStats(session.SessionID, 1, 0) - case *MouseThrashing: - if err = s.pg.InsertMouseThrashing(session, m); err != nil { - return err - } - return s.sessions.UpdateIssuesStats(session.SessionID, 0, 50) - case *CanvasNode: - if err = s.pg.InsertCanvasNode(session, m); err != nil { - return err - } - case *TagTrigger: - if err = s.pg.InsertTagTrigger(session, m); err != nil { - return err - } - } - return nil -} - func (s *saverImpl) Commit() error { - if s.pg != nil { - 
s.pg.Commit() - } - if s.ch != nil { - s.ch.Commit() - } + s.pg.Commit() + s.ch.Commit() return nil } func (s *saverImpl) Close() error { - if s.pg != nil { - if err := s.pg.Close(); err != nil { - s.log.Error(context.Background(), "pg.Close error: %s", err) - } + if err := s.pg.Close(); err != nil { + s.log.Error(context.Background(), "pg.Close error: %s", err) } - if s.ch != nil { - if err := s.ch.Stop(); err != nil { - s.log.Error(context.Background(), "ch.Close error: %s", err) - } + if err := s.ch.Stop(); err != nil { + s.log.Error(context.Background(), "ch.Close error: %s", err) } return nil } diff --git a/backend/internal/db/datasaver/web.go b/backend/internal/db/datasaver/web.go new file mode 100644 index 000000000..439bcec32 --- /dev/null +++ b/backend/internal/db/datasaver/web.go @@ -0,0 +1,146 @@ +package datasaver + +import ( + "context" + + "openreplay/backend/pkg/db/postgres" + "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/sessions" +) + +func (s *saverImpl) handleWebMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error { + switch m := msg.(type) { + case *messages.SessionStart: + return s.pg.HandleStartEvent(m) + case *messages.SessionEnd: + if err := s.pg.HandleEndEvent(m.SessionID()); err != nil { + return err + } + session, err := s.sessions.GetUpdated(m.SessionID(), true) + if err != nil { + return err + } + return s.ch.InsertWebSession(session) + case *messages.Metadata: + return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value) + case *messages.IssueEvent: + if m.Type == "dead_click" || m.Type == "click_rage" { + if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) { + return nil + } + } + if err := s.pg.InsertIssueEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type)); err != nil { + return err + } + return s.ch.InsertIssue(session, m) + case *messages.CustomIssue: + ie := &messages.IssueEvent{ + Type: "custom", + Timestamp: m.Timestamp, + MessageID: m.Index, + ContextString: m.Name, + Payload: m.Payload, + } + ie.SetMeta(m.Meta()) + if err := s.pg.InsertIssueEvent(session, ie); err != nil { + return err + } + return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type)) + case *messages.UserID: + if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil { + return err + } + s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID) + return nil + case *messages.UserAnonymousID: + if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil { + return err + } + s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID) + return nil + case *messages.CustomEvent: + if err := s.pg.InsertWebCustomEvent(session, m); err != nil { + return err + } + return s.ch.InsertCustom(session, m) + case *messages.MouseClick: + if err := s.pg.InsertWebClickEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil { + return err + } + return s.ch.InsertWebClickEvent(session, m) + case *messages.PageEvent: + if err := s.pg.InsertWebPageEvent(session, m); err != nil { + return err + } + s.sessions.UpdateReferrer(session.SessionID, m.Referrer) + s.sessions.UpdateUTM(session.SessionID, m.URL) + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 1); err != nil { + return err + } + return s.ch.InsertWebPageEvent(session, m) 
+ case *messages.NetworkRequest: + if err := s.pg.InsertWebNetworkRequest(session, m); err != nil { + return err + } + return s.ch.InsertRequest(session, m, session.SaveRequestPayload) + case *messages.GraphQL: + if err := s.pg.InsertWebGraphQL(session, m); err != nil { + return err + } + return s.ch.InsertGraphQL(session, m) + case *messages.JSException: + wrapper, err := types.WrapJSException(m) + if err != nil { + s.log.Warn(sessCtx, "error on wrapping JSException: %v", err) + } + if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil { + return err + } + if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil { + return err + } + return s.ch.InsertWebErrorEvent(session, wrapper) + case *messages.IntegrationEvent: + if err := s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)); err != nil { + return err + } + return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)) + case *messages.InputChange: + if err := s.pg.InsertInputChangeEvent(session, m); err != nil { + return err + } + if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil { + return err + } + return s.ch.InsertWebInputDuration(session, m) + case *messages.MouseThrashing: + if err := s.pg.InsertMouseThrashing(session, m); err != nil { + return err + } + if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, 50); err != nil { + return err + } + return s.ch.InsertMouseThrashing(session, m) + case *messages.CanvasNode: + if err := s.pg.InsertCanvasNode(session, m); err != nil { + return err + } + case *messages.TagTrigger: + if err := s.pg.InsertTagTrigger(session, m); err != nil { + return err + } + case *messages.PerformanceTrackAggr: + if err := s.pg.InsertWebStatsPerformance(m); err != nil { + return err + } + return s.ch.InsertWebPerformanceTrackAggr(session, m) + } + return nil +} diff --git a/ee/backend/pkg/db/clickhouse/bulk.go b/backend/pkg/db/clickhouse/bulk.go similarity index 99% rename from ee/backend/pkg/db/clickhouse/bulk.go rename to backend/pkg/db/clickhouse/bulk.go index 6eb8d98fd..f070f4a15 100644 --- a/ee/backend/pkg/db/clickhouse/bulk.go +++ b/backend/pkg/db/clickhouse/bulk.go @@ -5,10 +5,11 @@ import ( "errors" "fmt" "log" - "openreplay/backend/pkg/metrics/database" "time" "github.com/ClickHouse/clickhouse-go/v2/lib/driver" + + "openreplay/backend/pkg/metrics/database" ) type Bulk interface { diff --git a/backend/pkg/db/clickhouse/connector.go b/backend/pkg/db/clickhouse/connector.go index 727ad7f7b..71d94ab85 100644 --- a/backend/pkg/db/clickhouse/connector.go +++ b/backend/pkg/db/clickhouse/connector.go @@ -1,19 +1,31 @@ package clickhouse import ( + "errors" + "fmt" + "log" + "strings" + "time" + + "github.com/ClickHouse/clickhouse-go/v2" + "github.com/ClickHouse/clickhouse-go/v2/lib/driver" + + "openreplay/backend/internal/config/common" "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/hashid" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/sessions" + "openreplay/backend/pkg/url" ) type Connector interface { Prepare() error Commit() error Stop() error + // Web InsertWebSession(session *sessions.Session) error InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error - InsertWebInputEvent(session *sessions.Session, msg *messages.InputEvent) error InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error InsertWebPerformanceTrackAggr(session *sessions.Session, msg 
*messages.PerformanceTrackAggr) error InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error @@ -21,4 +33,669 @@ type Connector interface { InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error + InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error + InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error + // Mobile + InsertMobileSession(session *sessions.Session) error + InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error + InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error + InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error + InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error + InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error + InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error +} + +type task struct { + bulks []Bulk +} + +func NewTask() *task { + return &task{bulks: make([]Bulk, 0, 21)} +} + +type connectorImpl struct { + conn driver.Conn + batches map[string]Bulk //driver.Batch + workerTask chan *task + done chan struct{} + finished chan struct{} +} + +func NewConnector(cfg common.Clickhouse) Connector { + conn, err := clickhouse.Open(&clickhouse.Options{ + Addr: []string{cfg.GetTrimmedURL()}, + Auth: clickhouse.Auth{ + Database: cfg.Database, + Username: cfg.LegacyUserName, + Password: cfg.LegacyPassword, + }, + MaxOpenConns: 20, + MaxIdleConns: 15, + ConnMaxLifetime: 3 * time.Minute, + Compression: &clickhouse.Compression{ + Method: clickhouse.CompressionLZ4, + }, + }) + if err != nil { + log.Fatal(err) + } + + c := &connectorImpl{ + conn: conn, + batches: make(map[string]Bulk, 20), + workerTask: make(chan *task, 1), + done: make(chan struct{}), + finished: make(chan struct{}), + } + go c.worker() + return c +} + +func (c *connectorImpl) newBatch(name, query string) error { + batch, err := NewBulk(c.conn, name, query) + if err != nil { + return fmt.Errorf("can't create new batch: %s", err) + } + c.batches[name] = batch + return nil +} + +var batches = map[string]string{ + // Web + "sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?)", + "autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))", + "pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, 
first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, url_path, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?)", + "clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type, selector, normalized_x, normalized_y, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))", + "inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + "errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "performance": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "requests": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type, transfer_size, url_path) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000))", + "custom": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", + "graphql": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + "issuesEvents": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))", + "issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)", + //Mobile + "ios_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)", + "ios_custom": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", + "ios_clicks": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)", + "ios_swipes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, direction, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", + "ios_inputs": "INSERT INTO experimental.ios_events (session_id, project_id, 
message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)", + "ios_requests": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)", + "ios_crashes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, reason, stacktrace, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", +} + +func (c *connectorImpl) Prepare() error { + for table, query := range batches { + if err := c.newBatch(table, query); err != nil { + return fmt.Errorf("can't create %s batch: %s", table, err) + } + } + return nil +} + +func (c *connectorImpl) Commit() error { + newTask := NewTask() + for _, b := range c.batches { + newTask.bulks = append(newTask.bulks, b) + } + c.batches = make(map[string]Bulk, 20) + if err := c.Prepare(); err != nil { + log.Printf("can't prepare new CH batch set: %s", err) + } + c.workerTask <- newTask + return nil +} + +func (c *connectorImpl) Stop() error { + c.done <- struct{}{} + <-c.finished + return c.conn.Close() +} + +func (c *connectorImpl) sendBulks(t *task) { + for _, b := range t.bulks { + if err := b.Send(); err != nil { + log.Printf("can't send batch: %s", err) + } + } +} + +func (c *connectorImpl) worker() { + for { + select { + case t := <-c.workerTask: + c.sendBulks(t) + case <-c.done: + for t := range c.workerTask { + c.sendBulks(t) + } + c.finished <- struct{}{} + return + } + } +} + +func (c *connectorImpl) checkError(name string, err error) { + if err != clickhouse.ErrBatchAlreadySent { + log.Printf("can't create %s batch after failed append operation: %s", name, err) + } +} + +func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error { + if msg.Label == "" { + return nil + } + if err := c.batches["inputs"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MsgID(), + datetime(msg.Timestamp), + msg.Label, + "INPUT", + nullableUint16(uint16(msg.InputDuration)), + nullableUint32(uint32(msg.HesitationTime)), + ); err != nil { + c.checkError("inputs", err) + return fmt.Errorf("can't append to inputs batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error { + issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp) + // Insert issue event to batches + if err := c.batches["issuesEvents"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MsgID(), + datetime(msg.Timestamp), + issueID, + "mouse_thrashing", + "ISSUE", + msg.Url, + extractUrlPath(msg.Url), + ); err != nil { + c.checkError("issuesEvents", err) + return fmt.Errorf("can't append to issuesEvents batch: %s", err) + } + if err := c.batches["issues"].Append( + uint16(session.ProjectID), + issueID, + "mouse_thrashing", + msg.Url, + ); err != nil { + c.checkError("issues", err) + return fmt.Errorf("can't append to issues batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error { + issueID := hashid.IssueID(session.ProjectID, msg) + // Check issue type before insert to avoid panic from clickhouse lib + switch msg.Type { + case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", 
"ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing", "app_crash": + default: + return fmt.Errorf("unknown issueType: %s", msg.Type) + } + // Insert issue event to batches + if err := c.batches["issuesEvents"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MessageID, + datetime(msg.Timestamp), + issueID, + msg.Type, + "ISSUE", + msg.URL, + extractUrlPath(msg.URL), + ); err != nil { + c.checkError("issuesEvents", err) + return fmt.Errorf("can't append to issuesEvents batch: %s", err) + } + if err := c.batches["issues"].Append( + uint16(session.ProjectID), + issueID, + msg.Type, + msg.ContextString, + ); err != nil { + c.checkError("issues", err) + return fmt.Errorf("can't append to issues batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertWebSession(session *sessions.Session) error { + if session.Duration == nil { + return errors.New("trying to insert session with nil duration") + } + if err := c.batches["sessions"].Append( + session.SessionID, + uint16(session.ProjectID), + session.UserID, + session.UserUUID, + session.UserOS, + nullableString(session.UserOSVersion), + nullableString(session.UserDevice), + session.UserDeviceType, + session.UserCountry, + session.UserState, + session.UserCity, + datetime(session.Timestamp), + uint32(*session.Duration), + uint16(session.PagesCount), + uint16(session.EventsCount), + uint16(session.ErrorsCount), + uint32(session.IssueScore), + session.Referrer, + session.IssueTypes, + session.TrackerVersion, + session.UserBrowser, + nullableString(session.UserBrowserVersion), + session.Metadata1, + session.Metadata2, + session.Metadata3, + session.Metadata4, + session.Metadata5, + session.Metadata6, + session.Metadata7, + session.Metadata8, + session.Metadata9, + session.Metadata10, + session.Timezone, + session.UtmSource, + session.UtmMedium, + session.UtmCampaign, + ); err != nil { + c.checkError("sessions", err) + return fmt.Errorf("can't append to sessions batch: %s", err) + } + return nil +} + +func extractUrlPath(fullUrl string) string { + _, path, query, err := url.GetURLParts(fullUrl) + if err != nil { + log.Printf("can't parse url: %s", err) + return "" + } + pathQuery := path + if query != "" { + pathQuery += "?" 
+ query + } + return strings.ToLower(pathQuery) +} + +func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error { + if err := c.batches["pages"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MessageID, + datetime(msg.Timestamp), + msg.URL, + nullableUint16(uint16(msg.RequestStart)), + nullableUint16(uint16(msg.ResponseStart)), + nullableUint16(uint16(msg.ResponseEnd)), + nullableUint16(uint16(msg.DomContentLoadedEventStart)), + nullableUint16(uint16(msg.DomContentLoadedEventEnd)), + nullableUint16(uint16(msg.LoadEventStart)), + nullableUint16(uint16(msg.LoadEventEnd)), + nullableUint16(uint16(msg.FirstPaint)), + nullableUint16(uint16(msg.FirstContentfulPaint)), + nullableUint16(uint16(msg.SpeedIndex)), + nullableUint16(uint16(msg.VisuallyComplete)), + nullableUint16(uint16(msg.TimeToInteractive)), + extractUrlPath(msg.URL), + "LOCATION", + ); err != nil { + c.checkError("pages", err) + return fmt.Errorf("can't append to pages batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error { + if msg.Label == "" { + return nil + } + var nX *float32 = nil + var nY *float32 = nil + if msg.NormalizedX != 101 && msg.NormalizedY != 101 { + // To support previous versions of tracker + if msg.NormalizedX <= 100 && msg.NormalizedY <= 100 { + msg.NormalizedX *= 100 + msg.NormalizedY *= 100 + } + normalizedX := float32(msg.NormalizedX) / 100.0 + normalizedY := float32(msg.NormalizedY) / 100.0 + nXVal := normalizedX + nX = &nXVal + nYVal := normalizedY + nY = &nYVal + } + if err := c.batches["clicks"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MsgID(), + datetime(msg.Timestamp), + msg.Label, + nullableUint32(uint32(msg.HesitationTime)), + "CLICK", + msg.Selector, + nX, + nY, + msg.Url, + extractUrlPath(msg.Url), + ); err != nil { + c.checkError("clicks", err) + return fmt.Errorf("can't append to clicks batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error { + keys, values := make([]string, 0, len(msg.Tags)), make([]*string, 0, len(msg.Tags)) + for k, v := range msg.Tags { + keys = append(keys, k) + values = append(values, v) + } + // Check error source before insert to avoid panic from clickhouse lib + switch msg.Source { + case "js_exception", "bugsnag", "cloudwatch", "datadog", "elasticsearch", "newrelic", "rollbar", "sentry", "stackdriver", "sumologic": + default: + return fmt.Errorf("unknown error source: %s", msg.Source) + } + msgID, _ := msg.ID(session.ProjectID) + // Insert event to batch + if err := c.batches["errors"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.MessageID, + datetime(msg.Timestamp), + msg.Source, + nullableString(msg.Name), + msg.Message, + msgID, + "ERROR", + keys, + values, + ); err != nil { + c.checkError("errors", err) + return fmt.Errorf("can't append to errors batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error { + var timestamp uint64 = (msg.TimestampStart + msg.TimestampEnd) / 2 + if err := c.batches["performance"].Append( + session.SessionID, + uint16(session.ProjectID), + uint64(0), // TODO: find messageID for performance events + datetime(timestamp), + nullableString(msg.Meta().Url), + uint8(msg.MinFPS), + uint8(msg.AvgFPS), + uint8(msg.MaxFPS), + uint8(msg.MinCPU), + uint8(msg.AvgCPU), + 
uint8(msg.MaxCPU), + msg.MinTotalJSHeapSize, + msg.AvgTotalJSHeapSize, + msg.MaxTotalJSHeapSize, + msg.MinUsedJSHeapSize, + msg.AvgUsedJSHeapSize, + msg.MaxUsedJSHeapSize, + "PERFORMANCE", + ); err != nil { + c.checkError("performance", err) + return fmt.Errorf("can't append to performance batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error { + if len(msgValue) == 0 { + return nil + } + if err := c.batches["autocompletes"].Append( + uint16(session.ProjectID), + msgType, + msgValue, + ); err != nil { + c.checkError("autocompletes", err) + return fmt.Errorf("can't append to autocompletes batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error { + urlMethod := url.EnsureMethod(msg.Method) + if urlMethod == "" { + return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method) + } + var request, response *string + if savePayload { + request = &msg.Request + response = &msg.Response + } + if err := c.batches["requests"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), + msg.URL, + request, + response, + uint16(msg.Status), + url.EnsureMethod(msg.Method), + uint16(msg.Duration), + msg.Status < 400, + "REQUEST", + uint32(msg.TransferredBodySize), + extractUrlPath(msg.URL), + ); err != nil { + c.checkError("requests", err) + return fmt.Errorf("can't append to requests batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error { + if err := c.batches["custom"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), + msg.Name, + msg.Payload, + "CUSTOM", + ); err != nil { + c.checkError("custom", err) + return fmt.Errorf("can't append to custom batch: %s", err) + } + return nil +} + +func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error { + if err := c.batches["graphql"].Append( + session.SessionID, + uint16(session.ProjectID), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), + msg.OperationName, + nullableString(msg.Variables), + nullableString(msg.Response), + "GRAPHQL", + ); err != nil { + c.checkError("graphql", err) + return fmt.Errorf("can't append to graphql batch: %s", err) + } + return nil +} + +// Mobile events + +func (c *connectorImpl) InsertMobileSession(session *sessions.Session) error { + if session.Duration == nil { + return errors.New("trying to insert mobile session with nil duration") + } + if err := c.batches["ios_sessions"].Append( + session.SessionID, + uint16(session.ProjectID), + session.UserID, + session.UserUUID, + session.UserOS, + nullableString(session.UserOSVersion), + nullableString(session.UserDevice), + session.UserDeviceType, + session.UserCountry, + session.UserState, + session.UserCity, + datetime(session.Timestamp), + uint32(*session.Duration), + uint16(session.PagesCount), + uint16(session.EventsCount), + uint16(session.ErrorsCount), + uint32(session.IssueScore), + session.Referrer, + session.IssueTypes, + session.TrackerVersion, + session.UserBrowser, + nullableString(session.UserBrowserVersion), + session.Metadata1, + session.Metadata2, + session.Metadata3, + session.Metadata4, + session.Metadata5, + session.Metadata6, + session.Metadata7, + session.Metadata8, + 
+        session.Metadata9,
+        session.Metadata10,
+        "ios",
+        session.Timezone,
+    ); err != nil {
+        c.checkError("ios_sessions", err)
+        return fmt.Errorf("can't append to sessions batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error {
+    if err := c.batches["ios_custom"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.Meta().Index,
+        datetime(uint64(msg.Meta().Timestamp)),
+        msg.Name,
+        msg.Payload,
+        "CUSTOM",
+    ); err != nil {
+        c.checkError("ios_custom", err)
+        return fmt.Errorf("can't append to mobile custom batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error {
+    if msg.Label == "" {
+        return nil
+    }
+    if err := c.batches["ios_clicks"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.MsgID(),
+        datetime(msg.Timestamp),
+        msg.Label,
+        "TAP",
+    ); err != nil {
+        c.checkError("ios_clicks", err)
+        return fmt.Errorf("can't append to mobile clicks batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error {
+    if msg.Label == "" {
+        return nil
+    }
+    if err := c.batches["ios_swipes"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.MsgID(),
+        datetime(msg.Timestamp),
+        msg.Label,
+        nullableString(msg.Direction),
+        "SWIPE",
+    ); err != nil {
+        c.checkError("ios_swipes", err)
+        return fmt.Errorf("can't append to mobile swipes batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error {
+    if msg.Label == "" {
+        return nil
+    }
+    if err := c.batches["ios_inputs"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.MsgID(),
+        datetime(msg.Timestamp),
+        msg.Label,
+        "INPUT",
+    ); err != nil {
+        c.checkError("ios_inputs", err)
+        return fmt.Errorf("can't append to mobile inputs batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error {
+    urlMethod := url.EnsureMethod(msg.Method)
+    if urlMethod == "" {
+        return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
+    }
+    var request, response *string
+    if savePayload {
+        request = &msg.Request
+        response = &msg.Response
+    }
+    if err := c.batches["ios_requests"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.Meta().Index,
+        datetime(uint64(msg.Meta().Timestamp)),
+        msg.URL,
+        request,
+        response,
+        uint16(msg.Status),
+        url.EnsureMethod(msg.Method),
+        uint16(msg.Duration),
+        msg.Status < 400,
+        "REQUEST",
+    ); err != nil {
+        c.checkError("ios_requests", err)
+        return fmt.Errorf("can't append to mobile requests batch: %s", err)
+    }
+    return nil
+}
+
+func (c *connectorImpl) InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error {
+    if err := c.batches["ios_crashes"].Append(
+        session.SessionID,
+        uint16(session.ProjectID),
+        msg.MsgID(),
+        datetime(msg.Timestamp),
+        msg.Name,
+        msg.Reason,
+        msg.Stacktrace,
+        "CRASH",
+    ); err != nil {
+        c.checkError("ios_crashes", err)
+        return fmt.Errorf("can't append to mobile crashes batch: %s", err)
+    }
+    return nil
+}
diff --git a/ee/backend/pkg/db/clickhouse/insert_type.go b/backend/pkg/db/clickhouse/insert_type.go
similarity index 100%
rename from ee/backend/pkg/db/clickhouse/insert_type.go
rename to backend/pkg/db/clickhouse/insert_type.go
diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go
index cda778d7c..7ee1f997f 100644
--- a/backend/pkg/db/postgres/connector.go
+++ b/backend/pkg/db/postgres/connector.go
@@ -19,20 +19,17 @@ type Conn struct {
     Pool    pool.Pool
     batches *batch.BatchSet
     bulks   *BulkSet
-    chConn  CH // hack for autocomplete inserts, TODO: rewrite
+    chConn  CH
 }

-func (conn *Conn) SetClickHouse(ch CH) {
-    conn.chConn = ch
-}
-
-func NewConn(log logger.Logger, pool pool.Pool) *Conn {
+func NewConn(log logger.Logger, pool pool.Pool, ch CH) *Conn {
     if pool == nil {
         log.Fatal(context.Background(), "pg pool is empty")
     }
     return &Conn{
         log:     log,
         Pool:    pool,
+        chConn:  ch,
         bulks:   NewBulkSet(log, pool),
         batches: batch.NewBatchSet(log, pool),
     }
diff --git a/backend/pkg/integrations/clients/datadog.go b/backend/pkg/integrations/clients/datadog.go
index 1c574695d..01c97f85a 100644
--- a/backend/pkg/integrations/clients/datadog.go
+++ b/backend/pkg/integrations/clients/datadog.go
@@ -27,7 +27,7 @@ func (d *dataDogClient) FetchSessionData(credentials interface{}, sessionID uint
         // Not a struct, will try to parse as JSON string
         strCfg, ok := credentials.(map[string]interface{})
         if !ok {
-            return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
+            return nil, fmt.Errorf("invalid credentials")
         }
         cfg = datadogConfig{}
         if site, ok := strCfg["site"].(string); ok {
diff --git a/backend/pkg/integrations/clients/dynatrace.go b/backend/pkg/integrations/clients/dynatrace.go
index 06ad8419c..8de76cb03 100644
--- a/backend/pkg/integrations/clients/dynatrace.go
+++ b/backend/pkg/integrations/clients/dynatrace.go
@@ -32,7 +32,7 @@ func (d *dynatraceClient) FetchSessionData(credentials interface{}, sessionID ui
     if !ok {
         strCfg, ok := credentials.(map[string]interface{})
         if !ok {
-            return nil, fmt.Errorf("invalid credentials, got: %+v", credentials)
+            return nil, fmt.Errorf("invalid credentials")
         }
         cfg = dynatraceConfig{}
         if val, ok := strCfg["environment"].(string); ok {
diff --git a/backend/pkg/integrations/clients/elastic.go b/backend/pkg/integrations/clients/elastic.go
index 36754b159..2d3ffc4e2 100644
--- a/backend/pkg/integrations/clients/elastic.go
+++ b/backend/pkg/integrations/clients/elastic.go
@@ -5,7 +5,6 @@ import (
"encoding/base64" "encoding/json" "fmt" - "log" "strings" "github.com/elastic/go-elasticsearch/v8" @@ -29,7 +28,7 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI if !ok { strCfg, ok := credentials.(map[string]interface{}) if !ok { - return nil, fmt.Errorf("invalid credentials, got: %+v", credentials) + return nil, fmt.Errorf("invalid credentials") } cfg = elasticsearchConfig{} if val, ok := strCfg["url"].(string); ok { @@ -55,7 +54,7 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI // Create Elasticsearch client es, err := elasticsearch.NewClient(clientCfg) if err != nil { - log.Fatalf("Error creating the client: %s", err) + return nil, fmt.Errorf("error creating the client: %s", err) } var buf strings.Builder @@ -79,17 +78,17 @@ func (e *elasticsearchClient) FetchSessionData(credentials interface{}, sessionI es.Search.WithTrackTotalHits(true), ) if err != nil { - log.Fatalf("Error getting response: %s", err) + return nil, fmt.Errorf("error getting response: %s", err) } defer res.Body.Close() if res.IsError() { - log.Fatalf("Error: %s", res.String()) + return nil, fmt.Errorf("error: %s", res.String()) } var r map[string]interface{} if err := json.NewDecoder(res.Body).Decode(&r); err != nil { - log.Fatalf("Error parsing the response body: %s", err) + return nil, fmt.Errorf("error parsing the response body: %s", err) } if r["hits"] == nil { return nil, fmt.Errorf("no logs found") diff --git a/backend/pkg/integrations/clients/sentry.go b/backend/pkg/integrations/clients/sentry.go index 9fbaf4759..99ddc995c 100644 --- a/backend/pkg/integrations/clients/sentry.go +++ b/backend/pkg/integrations/clients/sentry.go @@ -4,7 +4,6 @@ import ( "encoding/json" "fmt" "io" - "log" "net/http" "net/url" ) @@ -35,7 +34,7 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6 if !ok { strCfg, ok := credentials.(map[string]interface{}) if !ok { - return nil, fmt.Errorf("invalid credentials, got: %+v", credentials) + return nil, fmt.Errorf("invalid credentials") } cfg = sentryConfig{} if val, ok := strCfg["organization_slug"].(string); ok { @@ -62,7 +61,7 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6 // Create a new request req, err := http.NewRequest("GET", requestUrl, nil) if err != nil { - log.Fatalf("Failed to create request: %v", err) + return nil, fmt.Errorf("failed to create request: %v", err) } // Add Authorization header @@ -72,26 +71,26 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6 client := &http.Client{} resp, err := client.Do(req) if err != nil { - log.Fatalf("Failed to send request: %v", err) + return nil, fmt.Errorf("failed to send request: %v", err) } defer resp.Body.Close() // Check if the response status is OK if resp.StatusCode != http.StatusOK { - log.Fatalf("Failed to fetch logs, status code: %v", resp.StatusCode) + return nil, fmt.Errorf("failed to fetch logs, status code: %v", resp.StatusCode) } // Read the response body body, err := io.ReadAll(resp.Body) if err != nil { - log.Fatalf("Failed to read response body: %v", err) + return nil, fmt.Errorf("failed to read response body: %v", err) } // Parse the JSON response var events []SentryEvent err = json.Unmarshal(body, &events) if err != nil { - log.Fatalf("Failed to parse JSON: %v", err) + return nil, fmt.Errorf("failed to parse JSON: %v", err) } if events == nil || len(events) == 0 { return nil, fmt.Errorf("no logs found") diff --git 
a/backend/pkg/sessions/sessions.go b/backend/pkg/sessions/sessions.go index 446fd1b1f..bd2519cc6 100644 --- a/backend/pkg/sessions/sessions.go +++ b/backend/pkg/sessions/sessions.go @@ -16,7 +16,7 @@ type Sessions interface { AddUnStarted(session *UnStartedSession) error AddCached(sessionID uint64, data map[string]string) error Get(sessionID uint64) (*Session, error) - GetUpdated(sessionID uint64) (*Session, error) + GetUpdated(sessionID uint64, keepInCache bool) (*Session, error) GetCached(sessionID uint64) (map[string]string, error) GetDuration(sessionID uint64) (uint64, error) UpdateDuration(sessionID uint64, timestamp uint64) (uint64, error) @@ -104,11 +104,14 @@ func (s *sessionsImpl) Get(sessionID uint64) (*Session, error) { } // Special method for clickhouse connector -func (s *sessionsImpl) GetUpdated(sessionID uint64) (*Session, error) { +func (s *sessionsImpl) GetUpdated(sessionID uint64, keepInCache bool) (*Session, error) { session, err := s.getFromDB(sessionID) if err != nil { return nil, err } + if !keepInCache { + return session, nil + } if err := s.cache.Set(session); err != nil { ctx := context.WithValue(context.Background(), "sessionID", sessionID) s.log.Warn(ctx, "failed to cache session: %s", err) diff --git a/backend/pkg/spot/builder.go b/backend/pkg/spot/builder.go index 14ae61365..209777f46 100644 --- a/backend/pkg/spot/builder.go +++ b/backend/pkg/spot/builder.go @@ -1,19 +1,19 @@ package spot import ( - "openreplay/backend/pkg/metrics/web" - "openreplay/backend/pkg/server/tracer" "time" "openreplay/backend/internal/config/spot" "openreplay/backend/pkg/db/postgres/pool" "openreplay/backend/pkg/flakeid" "openreplay/backend/pkg/logger" + "openreplay/backend/pkg/metrics/web" "openreplay/backend/pkg/objectstorage/store" "openreplay/backend/pkg/server/api" "openreplay/backend/pkg/server/auth" "openreplay/backend/pkg/server/keys" "openreplay/backend/pkg/server/limiter" + "openreplay/backend/pkg/server/tracer" spotAPI "openreplay/backend/pkg/spot/api" "openreplay/backend/pkg/spot/service" "openreplay/backend/pkg/spot/transcoder" diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 9a62041d0..2db51f61c 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -184,47 +184,35 @@ Pipfile.lock /build.sh /build_alerts.sh /build_crons.sh -/chalicelib/core/alerts.py /chalicelib/core/announcements.py /chalicelib/core/assist.py /chalicelib/core/authorizers.py -/chalicelib/core/autocomplete.py +/chalicelib/core/autocomplete/* /chalicelib/core/canvas.py -/chalicelib/core/collaboration_base.py -/chalicelib/core/collaboration_msteams.py -/chalicelib/core/collaboration_slack.py +/chalicelib/core/collaborations/* /chalicelib/core/countries.py +/chalicelib/core/metrics.py +/chalicelib/core/custom_metrics.py /chalicelib/core/custom_metrics_predefined.py /chalicelib/core/dashboards.py /chalicelib/core/errors_favorite.py /chalicelib/core/events_mobile.py /chalicelib/core/feature_flags.py /chalicelib/core/funnels.py -/chalicelib/core/integration_base.py -/chalicelib/core/integration_base_issue.py -/chalicelib/core/integration_github.py -/chalicelib/core/integration_github_issue.py -/chalicelib/core/integration_jira_cloud.py -/chalicelib/core/integration_jira_cloud_issue.py -/chalicelib/core/integrations_manager.py +/chalicelib/core/issue_tracking/*.py /chalicelib/core/issues.py /chalicelib/core/jobs.py -/chalicelib/core/log_tool_bugsnag.py -/chalicelib/core/log_tool_cloudwatch.py -/chalicelib/core/log_tool_datadog.py -/chalicelib/core/log_tool_elasticsearch.py 
-/chalicelib/core/log_tool_newrelic.py
-/chalicelib/core/log_tool_rollbar.py
-/chalicelib/core/log_tool_sentry.py
-/chalicelib/core/log_tool_stackdriver.py
-/chalicelib/core/log_tool_sumologic.py
+/chalicelib/core/log_tools/*.py
 /chalicelib/core/metadata.py
 /chalicelib/core/mobile.py
-/chalicelib/core/performance_event.py
 /chalicelib/core/saved_search.py
-/chalicelib/core/sessions.py
-/chalicelib/core/sessions_assignments.py
-/chalicelib/core/sessions_mobs.py
+/chalicelib/core/sessions/sessions.py
+/chalicelib/core/sessions/sessions_ch.py
+/chalicelib/core/sessions/sessions_assignments.py
+/chalicelib/core/sessions/sessions_metas.py
+/chalicelib/core/sessions/sessions_mobs.py
+/chalicelib/core/sessions/performance_event.py
+/chalicelib/core/sessions/unprocessed_sessions.py
 /chalicelib/core/significance.py
 /chalicelib/core/socket_ios.py
 /chalicelib/core/sourcemaps.py
@@ -276,6 +264,15 @@ Pipfile.lock
 /chalicelib/utils/or_cache/
 /routers/subs/health.py
 /chalicelib/core/spot.py
-/chalicelib/core/unprocessed_sessions.py
 /run-db_init-dev.sh
 /.dev/
+/chalicelib/core/product_anaytics2.py
+/chalicelib/utils/ch_client.py
+/chalicelib/utils/ch_client_exp.py
+/routers/subs/product_anaytics.py
+/chalicelib/core/alerts/__init__.py
+/chalicelib/core/alerts/alerts.py
+/chalicelib/core/alerts/alerts_processor.py
+/chalicelib/core/alerts/alerts_processor_ch.py
+/chalicelib/core/alerts/alerts_listener.py
+/chalicelib/core/alerts/modules/helpers.py
diff --git a/ee/api/Pipfile b/ee/api/Pipfile
index a3a45e591..a32b99a8e 100644
--- a/ee/api/Pipfile
+++ b/ee/api/Pipfile
@@ -4,26 +4,27 @@ verify_ssl = true
 name = "pypi"

 [packages]
-urllib3 = "==1.26.16"
+urllib3 = "==2.2.3"
 requests = "==2.32.3"
-boto3 = "==1.35.60"
-pyjwt = "==2.9.0"
+boto3 = "==1.35.76"
+pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
+psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
+clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+clickhouse-connect = "==0.8.9"
 elasticsearch = "==8.16.0"
 jira = "==3.8.0"
 cachetools = "==5.5.0"
-fastapi = "==0.115.5"
-uvicorn = {extras = ["standard"], version = "==0.32.0"}
+fastapi = "==0.115.6"
+uvicorn = {extras = ["standard"], version = "==0.32.1"}
 gunicorn = "==23.0.0"
 python-decouple = "==3.8"
-pydantic = {extras = ["email"], version = "==2.9.2"}
-apscheduler = "==3.10.4"
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+pydantic = {extras = ["email"], version = "==2.10.3"}
+apscheduler = "==3.11.0"
+redis = "==5.2.1"
 python3-saml = "==1.16.0"
 python-multipart = "==0.0.17"
-redis = "==5.2.0"
-azure-storage-blob = "==12.23.1"
+azure-storage-blob = "==12.24.0"

 [dev-packages]
diff --git a/ee/api/app.py b/ee/api/app.py
index 7a24c7683..7ad085882 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -17,11 +17,11 @@ from starlette.responses import StreamingResponse, JSONResponse
 from chalicelib.core import traces
 from chalicelib.utils import events_queue
 from chalicelib.utils import helper
-from chalicelib.utils import pg_client
+from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, ee_crons, core_dynamic_crons
 from routers import core, core_dynamic
 from routers import ee
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
 from routers.subs import v1_api_ee

 if config("ENABLE_SSO", cast=bool, default=True):
@@ -48,6 +48,7 @@
app.schedule = AsyncIOScheduler() app.queue_system = queue.Queue() await pg_client.init() + await ch_client.init() await events_queue.init() app.schedule.start() @@ -149,6 +150,10 @@ app.include_router(spot.public_app) app.include_router(spot.app) app.include_router(spot.app_apikey) +app.include_router(product_anaytics.public_app) +app.include_router(product_anaytics.app) +app.include_router(product_anaytics.app_apikey) + if config("ENABLE_SSO", cast=bool, default=True): app.include_router(saml.public_app) app.include_router(saml.app) diff --git a/ee/api/chalicelib/core/__init__.py b/ee/api/chalicelib/core/__init__.py index 88c8528a7..3af66a9c1 100644 --- a/ee/api/chalicelib/core/__init__.py +++ b/ee/api/chalicelib/core/__init__.py @@ -1,46 +1,33 @@ -from decouple import config import logging -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) +from decouple import config -from . import sessions as sessions_legacy - -if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - logging.info(">>> Using experimental sessions search") - from . import sessions_exp as sessions -else: - from . import sessions as sessions +logger = logging.getLogger(__name__) +from . import custom_metrics as custom_metrics_legacy +from . import custom_metrics_ee as custom_metrics +from . import metrics_ch as metrics +from . import metrics as metrics_legacy if config("EXP_AUTOCOMPLETE", cast=bool, default=False): - logging.info(">>> Using experimental autocomplete") - from . import autocomplete_exp as autocomplete + logger.info(">>> Using experimental autocomplete") else: from . import autocomplete as autocomplete if config("EXP_ERRORS_SEARCH", cast=bool, default=False): - logging.info(">>> Using experimental error search") + logger.info(">>> Using experimental error search") from . import errors as errors_legacy from . import errors_exp as errors if config("EXP_ERRORS_GET", cast=bool, default=False): - logging.info(">>> Using experimental error get") + logger.info(">>> Using experimental error get") else: from . import errors as errors if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False): - logging.info(">>> Using experimental sessions search for metrics") - -if config("EXP_ALERTS", cast=bool, default=False): - logging.info(">>> Using experimental alerts") - from . import alerts_processor_exp as alerts_processor -else: - from . import alerts_processor as alerts_processor + logger.info(">>> Using experimental sessions search for metrics") if config("EXP_FUNNELS", cast=bool, default=False): - logging.info(">>> Using experimental funnels") - if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from . import sessions as sessions_legacy - + logger.info(">>> Using experimental funnels") from . import significance_exp as significance else: from . 
import significance as significance diff --git a/ee/api/chalicelib/core/alerts/modules/__init__.py b/ee/api/chalicelib/core/alerts/modules/__init__.py new file mode 100644 index 000000000..f70bca7b7 --- /dev/null +++ b/ee/api/chalicelib/core/alerts/modules/__init__.py @@ -0,0 +1,16 @@ +from decouple import config + +TENANT_ID = "tenant_id" +if config("EXP_ALERTS", cast=bool, default=False): + if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): + from chalicelib.core.sessions import sessions + else: + from chalicelib.core.sessions import sessions_ch as sessions +else: + if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): + from chalicelib.core.sessions import sessions_ch as sessions + else: + from chalicelib.core.sessions import sessions + + +from . import helpers as alert_helpers diff --git a/ee/api/chalicelib/core/alerts_listener.py b/ee/api/chalicelib/core/alerts_listener.py deleted file mode 100644 index 3f216a420..000000000 --- a/ee/api/chalicelib/core/alerts_listener.py +++ /dev/null @@ -1,32 +0,0 @@ -from chalicelib.utils import pg_client, helper - - -def get_all_alerts(): - with pg_client.PostgresClient(long_query=True) as cur: - query = """SELECT tenant_id, - alert_id, - projects.project_id, - projects.name AS project_name, - detection_method, - query, - options, - (EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at, - alerts.name, - alerts.series_id, - filter, - change, - COALESCE(metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count', - query ->> 'left') AS series_name - FROM public.alerts - INNER JOIN projects USING (project_id) - LEFT JOIN metric_series USING (series_id) - LEFT JOIN metrics USING (metric_id) - WHERE alerts.deleted_at ISNULL - AND alerts.active - AND projects.active - AND projects.deleted_at ISNULL - AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL) - ORDER BY alerts.created_at;""" - cur.execute(query=query) - all_alerts = helper.list_to_camel_case(cur.fetchall()) - return all_alerts diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py deleted file mode 100644 index 629a18a37..000000000 --- a/ee/api/chalicelib/core/alerts_processor.py +++ /dev/null @@ -1,242 +0,0 @@ -import decimal -import logging - -from decouple import config -from pydantic_core._pydantic_core import ValidationError - -import schemas -from chalicelib.core import alerts -from chalicelib.core import alerts_listener -from chalicelib.utils import pg_client -from chalicelib.utils.TimeUTC import TimeUTC - -if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from chalicelib.core import sessions_legacy as sessions -else: - from chalicelib.core import sessions - -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) - -LeftToDb = { - schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"}, - schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"}, - schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"}, - schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - 
"formula": "AVG(NULLIF(dom_building_time,0))"}, - schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"}, - schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - "formula": "AVG(NULLIF(response_time,0))"}, - schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - "formula": "AVG(NULLIF(first_paint_time,0))"}, - schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: { - "table": "events.pages INNER JOIN public.sessions USING(session_id)", - "formula": "AVG(NULLIF(visually_complete,0))"}, - schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: { - "table": "public.sessions", - "formula": "COUNT(DISTINCT session_id)", - "condition": "errors_count > 0 AND duration>0"}, - schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: { - "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", - "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False}, - schemas.AlertColumn.ERRORS__BACKEND__COUNT: { - "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", - "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False}, -} - -# This is the frequency of execution for each threshold -TimeInterval = { - 15: 3, - 30: 5, - 60: 10, - 120: 20, - 240: 30, - 1440: 60, -} - - -def can_check(a) -> bool: - now = TimeUTC.now() - - repetitionBase = a["options"]["currentPeriod"] \ - if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \ - and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \ - else a["options"]["previousPeriod"] - - if TimeInterval.get(repetitionBase) is None: - logging.error(f"repetitionBase: {repetitionBase} NOT FOUND") - return False - - return (a["options"]["renotifyInterval"] <= 0 or - a["options"].get("lastNotification") is None or - a["options"]["lastNotification"] <= 0 or - ((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \ - and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000 - - -def Build(a): - now = TimeUTC.now() - params = {"project_id": a["projectId"], "now": now} - full_args = {} - j_s = True - main_table = "" - if a["seriesId"] is not None: - a["filter"]["sort"] = "session_id" - a["filter"]["order"] = schemas.SortOrderType.DESC - a["filter"]["startDate"] = 0 - a["filter"]["endDate"] = TimeUTC.now() - try: - data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"]) - except ValidationError: - logging.warning("Validation error for:") - logging.warning(a["filter"]) - raise - - full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False, - issue=None, project_id=a["projectId"], user_id=None, - favorite_only=False) - subQ = f"""SELECT COUNT(session_id) AS value - {query_part}""" - else: - colDef = LeftToDb[a["query"]["left"]] - subQ = f"""SELECT {colDef["formula"]} AS value - FROM {colDef["table"]} - WHERE project_id = %(project_id)s - {"AND " + colDef["condition"] if colDef.get("condition") else ""}""" - j_s = colDef.get("joinSessions", True) - main_table = colDef["table"] - is_ss = main_table == "public.sessions" - q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid""" - - if 
a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD: - if a["seriesId"] is not None: - q += f""" FROM ({subQ}) AS stat""" - else: - q += f""" FROM ({subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""} - {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat""" - params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000} - else: - if a["change"] == schemas.AlertDetectionType.CHANGE: - if a["seriesId"] is not None: - sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s") - sub1 = f"SELECT (({subQ})-({sub2})) AS value" - q += f" FROM ( {sub1} ) AS stat" - params = {**params, **full_args, - "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000, - "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000} - else: - sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""} - {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}""" - params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000 - sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""} - {"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}""" - params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000 - sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value" - q += f" FROM ( {sub1} ) AS stat" - - else: - if a["seriesId"] is not None: - sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s") - sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value" - q += f" FROM ({sub1}) AS stat" - params = {**params, **full_args, - "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000, - "timestamp_sub2": TimeUTC.now() \ - - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \ - * 60 * 1000} - else: - sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""} - {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}""" - params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000 - sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""} - {"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}""" - params["timestamp_sub2"] = TimeUTC.now() \ - - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000 - sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value" - q += f" FROM ({sub1}) AS stat" - - return q, params - - -def process(): - notifications = [] - all_alerts = alerts_listener.get_all_alerts() - with pg_client.PostgresClient() as cur: - for alert in all_alerts: - if can_check(alert): - query, params = Build(alert) - try: - query = cur.mogrify(query, params) - except Exception as e: - logging.error( - f"!!!Error while building alert query for alertId:{alert['alertId']} name: {alert['name']}") - logging.error(e) - continue - logging.debug(alert) - logging.debug(query) - try: - cur.execute(query) - result = cur.fetchone() - if result["valid"]: - logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}") - notifications.append(generate_notification(alert, result)) - except Exception as e: - logging.error( - f"!!!Error while running alert query for alertId:{alert['alertId']} 
name: {alert['name']}") - logging.error(query) - logging.error(e) - cur = cur.recreate(rollback=True) - if len(notifications) > 0: - cur.execute( - cur.mogrify(f"""UPDATE public.alerts - SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb - WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])})) - if len(notifications) > 0: - alerts.process_notifications(notifications) - - -def __format_value(x): - if x % 1 == 0: - x = int(x) - else: - x = round(x, 2) - return f"{x:,}" - - -def generate_notification(alert, result): - left = __format_value(result['value']) - right = __format_value(alert['query']['right']) - return { - "alertId": alert["alertId"], - "tenantId": alert["tenantId"], - "title": alert["name"], - "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).", - "buttonText": "Check metrics for more details", - "buttonUrl": f"/{alert['projectId']}/metrics", - "imageUrl": None, - "projectId": alert["projectId"], - "projectName": alert["projectName"], - "options": {"source": "ALERT", "sourceId": alert["alertId"], - "sourceMeta": alert["detectionMethod"], - "message": alert["options"]["message"], "projectId": alert["projectId"], - "data": {"title": alert["name"], - "limitValue": alert["query"]["right"], - "actualValue": float(result["value"]) \ - if isinstance(result["value"], decimal.Decimal) \ - else result["value"], - "operator": alert["query"]["operator"], - "trigger": alert["query"]["left"], - "alertId": alert["alertId"], - "detectionMethod": alert["detectionMethod"], - "currentPeriod": alert["options"]["currentPeriod"], - "previousPeriod": alert["options"]["previousPeriod"], - "createdAt": TimeUTC.now()}}, - } diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py deleted file mode 100644 index 0ac6d5405..000000000 --- a/ee/api/chalicelib/core/custom_metrics.py +++ /dev/null @@ -1,703 +0,0 @@ -import json -import logging - -from decouple import config -from fastapi import HTTPException, status - -import schemas -from chalicelib.core import funnels, issues, heatmaps, sessions_insights, sessions_mobs, sessions_favorite, \ - product_analytics, custom_metrics_predefined -from chalicelib.utils import helper, pg_client -from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.storage import extra - -if config("EXP_ERRORS_SEARCH", cast=bool, default=False): - logging.info(">>> Using experimental error search") - from . import errors_exp as errors -else: - from . 
import errors as errors - -if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False): - from chalicelib.core import sessions -else: - from chalicelib.core import sessions_legacy as sessions - -logger = logging.getLogger(__name__) - - -# TODO: refactor this to split -# timeseries / -# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs -# remove "table of" calls from this function -def __try_live(project_id, data: schemas.CardSchema): - results = [] - for i, s in enumerate(data.series): - results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, - view_type=data.view_type, metric_type=data.metric_type, - metric_of=data.metric_of, metric_value=data.metric_value)) - - return results - - -def __get_table_of_series(project_id, data: schemas.CardSchema): - results = [] - for i, s in enumerate(data.series): - results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, - metric_of=data.metric_of, metric_value=data.metric_value, - metric_format=data.metric_format)) - - return results - - -def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel, user_id: int = None): - if len(data.series) == 0: - return { - "stages": [], - "totalDropDueToIssues": 0 - } - - # return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, - # data=data.series[0].filter, - # metric_format=data.metric_format) - return funnels.get_simple_funnel(project=project, - data=data.series[0].filter, - metric_format=data.metric_format) - - -def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): - if len(data.series) == 0: - return { - "total": 0, - "errors": [] - } - return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id) - - -def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): - if len(data.series) == 0: - logger.debug("empty series") - return { - "total": 0, - "sessions": [] - } - return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id) - - -def __get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.CardHeatMap, - include_mobs: bool = True): - if len(data.series) == 0: - return None - data.series[0].filter.filters += data.series[0].filter.events - data.series[0].filter.events = [] - return heatmaps.search_short_session(project_id=project.project_id, user_id=user_id, - data=schemas.HeatMapSessionsSearch( - **data.series[0].filter.model_dump()), - include_mobs=include_mobs) - - -# EE only -def __get_insights_chart(project: schemas.ProjectContext, data: schemas.CardInsights, user_id: int = None): - return sessions_insights.fetch_selected(project_id=project.project_id, - data=schemas.GetInsightsSchema(startTimestamp=data.startTimestamp, - endTimestamp=data.endTimestamp, - metricValue=data.metric_value, - series=data.series)) - - -def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: int, data: schemas.CardPathAnalysis): - if len(data.series) == 0: - data.series.append( - schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp)) - elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema): - data.series[0].filter = schemas.PathAnalysisSchema() - - return product_analytics.path_analysis(project_id=project.project_id, data=data) - - -def __get_timeseries_chart(project: schemas.ProjectContext, data: 
schemas.CardTimeSeries, user_id: int = None): - series_charts = __try_live(project_id=project.project_id, data=data) - results = [{}] * len(series_charts[0]) - for i in range(len(results)): - for j, series_chart in enumerate(series_charts): - results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], - data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]} - return results - - -def not_supported(**args): - raise Exception("not supported") - - -def __get_table_of_user_ids(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_sessions(project: schemas.ProjectContext, data: schemas.CardTable, user_id): - return __get_sessions_list(project=project, user_id=user_id, data=data) - - -def __get_table_of_errors(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int): - return __get_errors_list(project=project, user_id=user_id, data=data) - - -def __get_table_of_issues(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_browsers(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_devises(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_countries(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_urls(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_referrers(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_of_requests(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int = None): - return __get_table_of_series(project_id=project.project_id, data=data) - - -def __get_table_chart(project: schemas.ProjectContext, data: schemas.CardTable, user_id: int): - supported = { - schemas.MetricOfTable.SESSIONS: __get_table_of_sessions, - schemas.MetricOfTable.ERRORS: __get_table_of_errors, - schemas.MetricOfTable.USER_ID: __get_table_of_user_ids, - schemas.MetricOfTable.ISSUES: __get_table_of_issues, - schemas.MetricOfTable.USER_BROWSER: __get_table_of_browsers, - schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises, - schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries, - schemas.MetricOfTable.VISITED_URL: __get_table_of_urls, - schemas.MetricOfTable.REFERRER: __get_table_of_referrers, - schemas.MetricOfTable.FETCH: __get_table_of_requests - } - return supported.get(data.metric_of, not_supported)(project=project, data=data, user_id=user_id) - - -def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id: int): - if data.is_predefined: - return custom_metrics_predefined.get_metric(key=data.metric_of, - project_id=project.project_id, - data=data.model_dump()) - - supported = { - schemas.MetricType.TIMESERIES: __get_timeseries_chart, - schemas.MetricType.TABLE: __get_table_chart, - schemas.MetricType.HEAT_MAP: __get_heat_map_chart, - 
schemas.MetricType.FUNNEL: __get_funnel_chart, - schemas.MetricType.INSIGHTS: __get_insights_chart, - schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart - } - return supported.get(data.metric_type, not_supported)(project=project, data=data, user_id=user_id) - - -def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) - if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): - return None - results = [] - for s in data.series: - results.append({"seriesId": s.series_id, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results - - -def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema): - results = [] - if len(data.series) == 0: - return results - for s in data.series: - if len(data.filters) > 0: - s.filter.filters += data.filters - s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True)) - - results.append({"seriesId": None, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) - - return results - - -def get_issues(project: schemas.ProjectContext, user_id: int, data: schemas.CardSchema): - if data.is_predefined: - return not_supported() - if data.metric_of == schemas.MetricOfTable.ISSUES: - return __get_table_of_issues(project=project, user_id=user_id, data=data) - supported = { - schemas.MetricType.TIMESERIES: not_supported, - schemas.MetricType.TABLE: not_supported, - schemas.MetricType.HEAT_MAP: not_supported, - schemas.MetricType.INSIGHTS: not_supported, - schemas.MetricType.PATH_ANALYSIS: not_supported, - } - return supported.get(data.metric_type, not_supported)() - - -def __get_path_analysis_card_info(data: schemas.CardPathAnalysis): - r = {"start_point": [s.model_dump() for s in data.start_point], - "start_type": data.start_type, - "excludes": [e.model_dump() for e in data.excludes], - "hideExcess": data.hide_excess} - return r - - -def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False): - with pg_client.PostgresClient() as cur: - session_data = None - if data.metric_type == schemas.MetricType.HEAT_MAP: - if data.session_id is not None: - session_data = {"sessionId": data.session_id} - else: - session_data = __get_heat_map_chart(project=project, user_id=user_id, - data=data, include_mobs=False) - if session_data is not None: - session_data = {"sessionId": session_data["sessionId"]} - - if session_data is not None: - # for EE only - keys = sessions_mobs. \ - __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"]) - keys += sessions_mobs. 
\ - __get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions - tag = config('RETENTION_L_VALUE', default='vault') - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") - logger.error(str(e)) - - _data = {"session_data": json.dumps(session_data) if session_data is not None else None} - for i, s in enumerate(data.series): - for k in s.model_dump().keys(): - _data[f"{k}_{i}"] = s.__getattribute__(k) - _data[f"index_{i}"] = i - _data[f"filter_{i}"] = s.filter.json() - series_len = len(data.series) - params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data, - "default_config": json.dumps(data.default_config.model_dump()), "card_info": None} - if data.metric_type == schemas.MetricType.PATH_ANALYSIS: - params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) - - query = """INSERT INTO metrics (project_id, user_id, name, is_public, - view_type, metric_type, metric_of, metric_value, - metric_format, default_config, thumbnail, data, - card_info) - VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, - %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, - %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, - %(card_info)s) - RETURNING metric_id""" - if len(data.series) > 0: - query = f"""WITH m AS ({query}) - INSERT INTO metric_series(metric_id, index, name, filter) - VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)" - for i in range(series_len)])} - RETURNING metric_id;""" - - query = cur.mogrify(query, params) - cur.execute(query) - r = cur.fetchone() - if dashboard: - return r["metric_id"] - return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)} - - -def update_card(metric_id, user_id, project_id, data: schemas.CardSchema): - metric: dict = get_card(metric_id=metric_id, project_id=project_id, - user_id=user_id, flatten=False, include_data=True) - if metric is None: - return None - series_ids = [r["seriesId"] for r in metric["series"]] - n_series = [] - d_series_ids = [] - u_series = [] - u_series_ids = [] - params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name, - "user_id": user_id, "project_id": project_id, "view_type": data.view_type, - "metric_type": data.metric_type, "metric_of": data.metric_of, - "metric_value": data.metric_value, "metric_format": data.metric_format, - "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail} - for i, s in enumerate(data.series): - prefix = "u_" - if s.index is None: - s.index = i - if s.series_id is None or s.series_id not in series_ids: - n_series.append({"i": i, "s": s}) - prefix = "n_" - else: - u_series.append({"i": i, "s": s}) - u_series_ids.append(s.series_id) - ns = s.model_dump() - for k in ns.keys(): - if k == "filter": - ns[k] = json.dumps(ns[k]) - params[f"{prefix}{k}_{i}"] = ns[k] - for i in series_ids: - if i not in u_series_ids: - d_series_ids.append(i) - params["d_series_ids"] = tuple(d_series_ids) - params["card_info"] = None - params["session_data"] = json.dumps(metric["data"]) - if data.metric_type == schemas.MetricType.PATH_ANALYSIS: - params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) - elif data.metric_type == schemas.MetricType.HEAT_MAP: - if data.session_id is not None: - params["session_data"] = json.dumps({"sessionId": 
data.session_id}) - elif metric.get("data") and metric["data"].get("sessionId"): - params["session_data"] = json.dumps({"sessionId": metric["data"]["sessionId"]}) - - with pg_client.PostgresClient() as cur: - sub_queries = [] - if len(n_series) > 0: - sub_queries.append(f"""\ - n AS (INSERT INTO metric_series (metric_id, index, name, filter) - VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)" - for s in n_series])} - RETURNING 1)""") - if len(u_series) > 0: - sub_queries.append(f"""\ - u AS (UPDATE metric_series - SET name=series.name, - filter=series.filter, - index=series.index - FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)" - for s in u_series])}) AS series(series_id, index, name, filter) - WHERE metric_series.metric_id =%(metric_id)s AND metric_series.series_id=series.series_id - RETURNING 1)""") - if len(d_series_ids) > 0: - sub_queries.append("""\ - d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s - RETURNING 1)""") - query = cur.mogrify(f"""\ - {"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)} - UPDATE metrics - SET name = %(name)s, is_public= %(is_public)s, - view_type= %(view_type)s, metric_type= %(metric_type)s, - metric_of= %(metric_of)s, metric_value= %(metric_value)s, - metric_format= %(metric_format)s, - edited_at = timezone('utc'::text, now()), - default_config = %(config)s, - thumbnail = %(thumbnail)s, - card_info = %(card_info)s, - data = %(session_data)s - WHERE metric_id = %(metric_id)s - AND project_id = %(project_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING metric_id;""", params) - cur.execute(query) - return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id) - - -def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False): - constraints = ["metrics.project_id = %(project_id)s", - "metrics.deleted_at ISNULL"] - params = {"project_id": project_id, "user_id": user_id, - "offset": (data.page - 1) * data.limit, - "limit": data.limit, } - if data.mine_only: - constraints.append("user_id = %(user_id)s") - else: - constraints.append("(user_id = %(user_id)s OR metrics.is_public)") - if data.shared_only: - constraints.append("is_public") - - if data.query is not None and len(data.query) > 0: - constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)") - params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator.CONTAINS) - with pg_client.PostgresClient() as cur: - sub_join = "" - if include_series: - sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series - FROM metric_series - WHERE metric_series.metric_id = metrics.metric_id - AND metric_series.deleted_at ISNULL - ) AS metric_series ON (TRUE)""" - query = cur.mogrify( - f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at, - metric_type, metric_of, metric_format, metric_value, view_type, is_pinned, - dashboards, owner_email, owner_name, default_config AS config, thumbnail - FROM metrics - {sub_join} - LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards - FROM (SELECT DISTINCT dashboard_id, name, is_public - FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id) - WHERE deleted_at ISNULL - AND dashboard_widgets.metric_id = metrics.metric_id - 
AND project_id = %(project_id)s - AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards - ) AS connected_dashboards ON (TRUE) - LEFT JOIN LATERAL (SELECT email AS owner_email, name AS owner_name - FROM users - WHERE deleted_at ISNULL - AND users.user_id = metrics.user_id - ) AS owner ON (TRUE) - WHERE {" AND ".join(constraints)} - ORDER BY created_at {data.order.value} - LIMIT %(limit)s OFFSET %(offset)s;""", params) - logger.debug("---------") - logger.debug(query) - logger.debug("---------") - cur.execute(query) - rows = cur.fetchall() - if include_series: - for r in rows: - for s in r["series"]: - s["filter"] = helper.old_search_payload_to_flat(s["filter"]) - else: - for r in rows: - r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"]) - rows = helper.list_to_camel_case(rows) - return rows - - -def get_all(project_id, user_id): - default_search = schemas.SearchCardsSchema() - rows = search_all(project_id=project_id, user_id=user_id, data=default_search) - result = rows - while len(rows) == default_search.limit: - default_search.page += 1 - rows = search_all(project_id=project_id, user_id=user_id, data=default_search) - result += rows - - return result - - -def delete_card(project_id, metric_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.metrics - SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) - WHERE project_id = %(project_id)s - AND metric_id = %(metric_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING data;""", - {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) - ) - # for EE only - row = cur.fetchone() - if row: - if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]): - keys = sessions_mobs. \ - __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"]) - keys += sessions_mobs. 
\ - __get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions - tag = config('RETENTION_D_VALUE', default='default') - for k in keys: - try: - extra.tag_session(file_key=k, tag_value=tag) - except Exception as e: - logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") - logger.error(str(e)) - return {"state": "success"} - - -def __get_path_analysis_attributes(row): - card_info = row.pop("cardInfo") - row["excludes"] = card_info.get("excludes", []) - row["startPoint"] = card_info.get("startPoint", []) - row["startType"] = card_info.get("startType", "start") - row["hideExcess"] = card_info.get("hideExcess", False) - return row - - -def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type, - view_type, metric_of, metric_value, metric_format, is_pinned, default_config, - default_config AS config,series, dashboards, owner_email, card_info - {',data' if include_data else ''} - FROM metrics - LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series - FROM metric_series - WHERE metric_series.metric_id = metrics.metric_id - AND metric_series.deleted_at ISNULL - ) AS metric_series ON (TRUE) - LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards - FROM (SELECT dashboard_id, name, is_public - FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id) - WHERE deleted_at ISNULL - AND project_id = %(project_id)s - AND ((dashboards.user_id = %(user_id)s OR is_public)) - AND metric_id = %(metric_id)s) AS connected_dashboards - ) AS connected_dashboards ON (TRUE) - LEFT JOIN LATERAL (SELECT email AS owner_email - FROM users - WHERE deleted_at ISNULL - AND users.user_id = metrics.user_id - ) AS owner ON (TRUE) - WHERE metrics.project_id = %(project_id)s - AND metrics.deleted_at ISNULL - AND (metrics.user_id = %(user_id)s OR metrics.is_public) - AND metrics.metric_id = %(metric_id)s - ORDER BY created_at;""", - {"metric_id": metric_id, "project_id": project_id, "user_id": user_id} - ) - cur.execute(query) - row = cur.fetchone() - if row is None: - return None - row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) - row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"]) - if flatten: - for s in row["series"]: - s["filter"] = helper.old_search_payload_to_flat(s["filter"]) - row = helper.dict_to_camel_case(row) - if row["metricType"] == schemas.MetricType.PATH_ANALYSIS: - row = __get_path_analysis_attributes(row=row) - return row - - -def get_series_for_alert(project_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """SELECT series_id AS value, - metrics.name || '.' 
|| (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name, - 'count' AS unit, - FALSE AS predefined, - metric_id, - series_id - FROM metric_series - INNER JOIN metrics USING (metric_id) - WHERE metrics.deleted_at ISNULL - AND metrics.project_id = %(project_id)s - AND metrics.metric_type = 'timeseries' - AND (user_id = %(user_id)s OR is_public) - ORDER BY name;""", - {"project_id": project_id, "user_id": user_id} - ) - ) - rows = cur.fetchall() - return helper.list_to_camel_case(rows) - - -def change_state(project_id, metric_id, user_id, status): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.metrics - SET active = %(status)s - WHERE metric_id = %(metric_id)s - AND (user_id = %(user_id)s OR is_public);""", - {"metric_id": metric_id, "status": status, "user_id": user_id}) - ) - return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id) - - -def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, - data: schemas.CardSessionsSchema - # , range_value=None, start_date=None, end_date=None - ): - # No need for this because UI is sending the full payload - # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - # if card is None: - # return None - # metric: schemas.CardSchema = schemas.CardSchema(**card) - # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) - # if metric is None: - # return None - if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): - return None - for s in data.series: - s.filter.startTimestamp = data.startTimestamp - s.filter.endTimestamp = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page - issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) - issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", []) - issue = None - for i in issues_list: - if i.get("issueId", "") == issue_id: - issue = i - break - if issue is None: - issue = issues.get(project_id=project_id, issue_id=issue_id) - if issue is not None: - issue = {**issue, - "affectedSessions": 0, - "affectedUsers": 0, - "conversionImpact": 0, - "lostConversions": 0, - "unaffectedSessions": 0} - return {"seriesId": s.series_id, "seriesName": s.name, - "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, - issue=issue, data=s.filter) - if issue is not None else {"total": 0, "sessions": []}, - "issue": issue} - - -def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema): - raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True) - - if raw_metric is None: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found") - raw_metric["startTimestamp"] = data.startTimestamp - raw_metric["endTimestamp"] = data.endTimestamp - raw_metric["limit"] = data.limit - raw_metric["density"] = data.density - metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) - - if metric.is_predefined: - return custom_metrics_predefined.get_metric(key=metric.metric_of, - project_id=project.project_id, - data=data.model_dump()) - elif metric.metric_type == schemas.MetricType.HEAT_MAP: - if raw_metric["data"] and raw_metric["data"].get("sessionId"): - return heatmaps.get_selected_session(project_id=project.project_id, - session_id=raw_metric["data"]["sessionId"]) - else: - return 
heatmaps.search_short_session(project_id=project.project_id, - data=schemas.HeatMapSessionsSearch(**metric.model_dump()), - user_id=user_id) - - return get_chart(project=project, data=metric, user_id=user_id) - - -def card_exists(metric_id, project_id, user_id) -> bool: - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - f"""SELECT 1 - FROM metrics - LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards - FROM (SELECT dashboard_id, name, is_public - FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id) - WHERE deleted_at ISNULL - AND project_id = %(project_id)s - AND ((dashboards.user_id = %(user_id)s OR is_public)) - AND metric_id = %(metric_id)s) AS connected_dashboards - ) AS connected_dashboards ON (TRUE) - LEFT JOIN LATERAL (SELECT email AS owner_email - FROM users - WHERE deleted_at ISNULL - AND users.user_id = metrics.user_id - ) AS owner ON (TRUE) - WHERE metrics.project_id = %(project_id)s - AND metrics.deleted_at ISNULL - AND (metrics.user_id = %(user_id)s OR metrics.is_public) - AND metrics.metric_id = %(metric_id)s - ORDER BY created_at;""", - {"metric_id": metric_id, "project_id": project_id, "user_id": user_id} - ) - cur.execute(query) - row = cur.fetchone() - return row is not None diff --git a/ee/api/chalicelib/core/custom_metrics_ee.py b/ee/api/chalicelib/core/custom_metrics_ee.py new file mode 100644 index 000000000..dcfadfb0f --- /dev/null +++ b/ee/api/chalicelib/core/custom_metrics_ee.py @@ -0,0 +1,236 @@ +import json +import logging + +from decouple import config +from fastapi import HTTPException, status +from .custom_metrics import * +import schemas +from chalicelib.core import funnels, issues, heatmaps, sessions_mobs, sessions_favorite, \ + product_analytics, custom_metrics_predefined +from chalicelib.utils import helper, pg_client +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.storage import extra + +# TODO: fix this import +from . import errors as errors +# if config("EXP_ERRORS_SEARCH", cast=bool, default=False): +# logging.info(">>> Using experimental error search") +# from . import errors_exp as errors +# else: +# from . 
import errors as errors + +if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False): + from chalicelib.core import sessions +else: + from chalicelib.core import sessions_legacy as sessions + +logger = logging.getLogger(__name__) + + +# TODO: refactor this to split +# timeseries / +# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs +# remove "table of" calls from this function +def __try_live(project_id, data: schemas.CardSchema): + results = [] + for i, s in enumerate(data.series): + results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, + view_type=data.view_type, metric_type=data.metric_type, + metric_of=data.metric_of, metric_value=data.metric_value)) + + return results + + +def __get_table_of_series(project_id, data: schemas.CardSchema): + results = [] + for i, s in enumerate(data.series): + results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density, + metric_of=data.metric_of, metric_value=data.metric_value, + metric_format=data.metric_format)) + + return results + + +def __get_errors_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): + if len(data.series) == 0: + return { + "total": 0, + "errors": [] + } + return errors.search(data.series[0].filter, project_id=project.project_id, user_id=user_id) + + +def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas.CardSchema): + if len(data.series) == 0: + logger.debug("empty series") + return { + "total": 0, + "sessions": [] + } + return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id) + + +def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): + # No need for this because UI is sending the full payload + # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + # if card is None: + # return None + # metric: schemas.CardSchema = schemas.CardSchema(**card) + # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) + if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): + return None + results = [] + for s in data.series: + results.append({"seriesId": s.series_id, "seriesName": s.name, + **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) + + return results + + +def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema): + results = [] + if len(data.series) == 0: + return results + for s in data.series: + if len(data.filters) > 0: + s.filter.filters += data.filters + s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True)) + + results.append({"seriesId": None, "seriesName": s.name, + **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) + + return results + + +def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSchema, dashboard=False): + with pg_client.PostgresClient() as cur: + session_data = None + if data.metric_type == schemas.MetricType.HEAT_MAP: + if data.session_id is not None: + session_data = {"sessionId": data.session_id} + else: + session_data = __get_heat_map_chart(project=project, user_id=user_id, + data=data, include_mobs=False) + if session_data is not None: + session_data = {"sessionId": session_data["sessionId"]} + + if session_data is not None: + # for EE only + keys = sessions_mobs. 
\ + __get_mob_keys(project_id=project.project_id, session_id=session_data["sessionId"]) + keys += sessions_mobs. \ + __get_mob_keys_deprecated(session_id=session_data["sessionId"]) # To support old sessions + tag = config('RETENTION_L_VALUE', default='vault') + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") + logger.error(str(e)) + + _data = {"session_data": json.dumps(session_data) if session_data is not None else None} + for i, s in enumerate(data.series): + for k in s.model_dump().keys(): + _data[f"{k}_{i}"] = s.__getattribute__(k) + _data[f"index_{i}"] = i + _data[f"filter_{i}"] = s.filter.json() + series_len = len(data.series) + params = {"user_id": user_id, "project_id": project.project_id, **data.model_dump(), **_data, + "default_config": json.dumps(data.default_config.model_dump()), "card_info": None} + if data.metric_type == schemas.MetricType.PATH_ANALYSIS: + params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) + + query = """INSERT INTO metrics (project_id, user_id, name, is_public, + view_type, metric_type, metric_of, metric_value, + metric_format, default_config, thumbnail, data, + card_info) + VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, + %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, + %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, + %(card_info)s) + RETURNING metric_id""" + if len(data.series) > 0: + query = f"""WITH m AS ({query}) + INSERT INTO metric_series(metric_id, index, name, filter) + VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)" + for i in range(series_len)])} + RETURNING metric_id;""" + + query = cur.mogrify(query, params) + cur.execute(query) + r = cur.fetchone() + if dashboard: + return r["metric_id"] + return {"data": get_card(metric_id=r["metric_id"], project_id=project.project_id, user_id=user_id)} + + +def delete_card(project_id, metric_id, user_id): + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify("""\ + UPDATE public.metrics + SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) + WHERE project_id = %(project_id)s + AND metric_id = %(metric_id)s + AND (user_id = %(user_id)s OR is_public) + RETURNING data;""", + {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) + ) + # for EE only + row = cur.fetchone() + if row: + if row["data"] and not sessions_favorite.favorite_session_exists(session_id=row["data"]["sessionId"]): + keys = sessions_mobs. \ + __get_mob_keys(project_id=project_id, session_id=row["data"]["sessionId"]) + keys += sessions_mobs. 
\ + __get_mob_keys_deprecated(session_id=row["data"]["sessionId"]) # To support old sessions + tag = config('RETENTION_D_VALUE', default='default') + for k in keys: + try: + extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + logger.warning(f"!!!Error while tagging: {k} to {tag} for heatMap") + logger.error(str(e)) + return {"state": "success"} + + +def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, + data: schemas.CardSessionsSchema + # , range_value=None, start_date=None, end_date=None + ): + # No need for this because UI is sending the full payload + # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + # if card is None: + # return None + # metric: schemas.CardSchema = schemas.CardSchema(**card) + # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) + # if metric is None: + # return None + if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id): + return None + for s in data.series: + s.filter.startTimestamp = data.startTimestamp + s.filter.endTimestamp = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) + issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", []) + issue = None + for i in issues_list: + if i.get("issueId", "") == issue_id: + issue = i + break + if issue is None: + issue = issues.get(project_id=project_id, issue_id=issue_id) + if issue is not None: + issue = {**issue, + "affectedSessions": 0, + "affectedUsers": 0, + "conversionImpact": 0, + "lostConversions": 0, + "unaffectedSessions": 0} + return {"seriesId": s.series_id, "seriesName": s.name, + "sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, + issue=issue, data=s.filter) + if issue is not None else {"total": 0, "sessions": []}, + "issue": issue} diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py index 3b890c5e3..d397ca3bf 100644 --- a/ee/api/chalicelib/core/events.py +++ b/ee/api/chalicelib/core/events.py @@ -4,7 +4,7 @@ from decouple import config import schemas from chalicelib.core import issues -from chalicelib.core import sessions_metas +from chalicelib.core.sessions import sessions_metas from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.event_filter_definition import SupportedFilter, Event @@ -169,22 +169,22 @@ SUPPORTED_TYPES = { query=None), # IOS EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.CLICK_MOBILE.ui_type)), + query=autocomplete.__generic_query( + typename=EventType.CLICK_MOBILE.ui_type)), EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.INPUT_MOBILE.ui_type)), + query=autocomplete.__generic_query( + typename=EventType.INPUT_MOBILE.ui_type)), EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.VIEW_MOBILE.ui_type)), - EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE), - query=autocomplete.__generic_query( - typename=EventType.CUSTOM_MOBILE.ui_type)), - 
EventType.REQUEST_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE), query=autocomplete.__generic_query( - typename=EventType.REQUEST_MOBILE.ui_type)), + typename=EventType.VIEW_MOBILE.ui_type)), + EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE), + query=autocomplete.__generic_query( + typename=EventType.CUSTOM_MOBILE.ui_type)), + EventType.REQUEST_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE), + query=autocomplete.__generic_query( + typename=EventType.REQUEST_MOBILE.ui_type)), EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile, - query=None), + query=None), } diff --git a/ee/api/chalicelib/core/heatmaps.py b/ee/api/chalicelib/core/heatmaps.py index 35ac03751..41cffa237 100644 --- a/ee/api/chalicelib/core/heatmaps.py +++ b/ee/api/chalicelib/core/heatmaps.py @@ -7,7 +7,7 @@ from chalicelib.core import sessions_mobs, events from chalicelib.utils import sql_helper as sh if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from chalicelib.core import sessions_exp as sessions + from chalicelib.core import sessions_ch as sessions else: from chalicelib.core import sessions diff --git a/ee/api/chalicelib/core/issue_tracking/modules/__init__.py b/ee/api/chalicelib/core/issue_tracking/modules/__init__.py new file mode 100644 index 000000000..266d4a821 --- /dev/null +++ b/ee/api/chalicelib/core/issue_tracking/modules/__init__.py @@ -0,0 +1 @@ +TENANT_CONDITION = "tenant_id=%(tenant_id)s" diff --git a/ee/api/chalicelib/core/log_tools.py b/ee/api/chalicelib/core/log_tools.py deleted file mode 100644 index e1903f695..000000000 --- a/ee/api/chalicelib/core/log_tools.py +++ /dev/null @@ -1,106 +0,0 @@ -from chalicelib.utils import pg_client, helper -import json - -EXCEPT = ["jira_server", "jira_cloud"] - - -def search(project_id): - result = [] - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - SELECT supported_integrations.name, - (SELECT COUNT(*) - FROM public.integrations - INNER JOIN public.projects USING (project_id) - WHERE provider = supported_integrations.name - AND project_id = %(project_id)s - AND projects.deleted_at ISNULL - LIMIT 1) AS count - FROM unnest(enum_range(NULL::integration_provider)) AS supported_integrations(name);""", - {"project_id": project_id}) - ) - r = cur.fetchall() - for k in r: - if k["count"] > 0 and k["name"] not in EXCEPT: - result.append({"value": helper.key_to_camel_case(k["name"]), "type": "logTool"}) - return {"data": result} - - -def add(project_id, integration, options): - options = json.dumps(options) - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - INSERT INTO public.integrations(project_id, provider, options) - VALUES (%(project_id)s, %(provider)s, %(options)s::jsonb) - RETURNING *;""", - {"project_id": project_id, "provider": integration, "options": options}) - ) - r = cur.fetchone() - return helper.dict_to_camel_case(helper.flatten_nested_dicts(r)) - - -def get(project_id, integration): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - SELECT integrations.* - FROM public.integrations INNER JOIN public.projects USING(project_id) - WHERE provider = %(provider)s - AND project_id = %(project_id)s - AND projects.deleted_at ISNULL - LIMIT 1;""", - {"project_id": project_id, "provider": integration}) - ) - r = cur.fetchone() - return 
helper.dict_to_camel_case(helper.flatten_nested_dicts(r)) - - -def edit(project_id, integration, changes): - if "projectId" in changes: - changes.pop("project_id") - if "integration" in changes: - changes.pop("integration") - if len(changes.keys()) == 0: - return None - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.integrations - SET options=options||%(changes)s - WHERE project_id =%(project_id)s AND provider = %(provider)s - RETURNING *;""", - {"project_id": project_id, "provider": integration, "changes": json.dumps(changes)}) - ) - return helper.dict_to_camel_case(helper.flatten_nested_dicts(cur.fetchone())) - - -def delete(project_id, integration): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - DELETE FROM public.integrations - WHERE project_id=%(project_id)s AND provider=%(provider)s;""", - {"project_id": project_id, "provider": integration}) - ) - return {"state": "success"} - - -def get_all_by_tenant(tenant_id, integration): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - SELECT integrations.* - FROM public.integrations INNER JOIN public.projects USING(project_id) - WHERE provider = %(provider)s - AND tenant_id = %(tenant_id)s - AND projects.deleted_at ISNULL;""", - {"tenant_id": tenant_id, "provider": integration}) - ) - r = cur.fetchall() - return helper.list_to_camel_case(r, flatten=True) diff --git a/ee/api/chalicelib/core/log_tools/modules/__init__.py b/ee/api/chalicelib/core/log_tools/modules/__init__.py new file mode 100644 index 000000000..a9d0d44d9 --- /dev/null +++ b/ee/api/chalicelib/core/log_tools/modules/__init__.py @@ -0,0 +1 @@ +TENANT_CONDITION = "tenant_id = %(tenant_id)s" diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics_ch.py similarity index 96% rename from ee/api/chalicelib/core/metrics.py rename to ee/api/chalicelib/core/metrics_ch.py index 13fb95fa4..802555918 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics_ch.py @@ -168,7 +168,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) results = { "value": sum([r["value"] for r in rows]), @@ -187,7 +187,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - count = ch.execute(query=ch_query, params=params) + count = ch.execute(query=ch_query, parameters=params) count = count[0]["count"] @@ -234,7 +234,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC. 
"endTimestamp": endTimestamp, "step_size": step_size, "status_code": status, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) rows = __nested_array_to_dict_array(rows) neutral = __get_domains_errors_neutral(rows) rows = __merge_rows_with_neutral(rows, neutral) @@ -289,9 +289,9 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d ORDER BY errors_count DESC LIMIT %(limit)s OFFSET %(limit_s)s;""" logger.debug("-----------") - logger.debug(ch.format(query=ch_query, params=params)) + logger.debug(ch.format(query=ch_query, parameters=params)) logger.debug("-----------") - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) response = {"count": 0, "total": 0, "values": []} if len(rows) > 0: response["count"] = rows[0]["count"] @@ -328,8 +328,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch.format(query=ch_query, params=params)) - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) rows = helper.list_to_camel_case(rows) return __complete_missing_steps(rows=rows, start_time=startTimestamp, @@ -416,8 +415,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1) "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch.format(query=ch_query, params=params)) - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) return helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, @@ -466,7 +464,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) return rows @@ -490,7 +488,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, WHERE count>0 GROUP BY timestamp ORDER BY timestamp;""" - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, neutral={"value": 0}) @@ -519,7 +517,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_ "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) result = rows[0] ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, COUNT(1) AS value @@ -527,7 +525,8 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_ WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp ORDER BY timestamp;""" - rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + params = {**params, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, parameters=params) rows = 
__complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, neutral={"value": 0}) @@ -559,7 +558,7 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1), params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) results = { "value": sum([r["value"] for r in rows]), @@ -578,7 +577,7 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1), params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - count = ch.execute(query=ch_query, params=params) + count = ch.execute(query=ch_query, parameters=params) count = count[0]["count"] @@ -606,9 +605,9 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - rows = ch.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, parameters=params) ch_query = f"""SELECT COALESCE(avgOrNull(pages.speed_index),0) AS avg FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages WHERE {" AND ".join(ch_sub_query)};""" - avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 + avg = ch.execute(query=ch_query, parameters=params)[0]["avg"] if len(rows) > 0 else 0 return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND} diff --git a/ee/api/chalicelib/core/product_analytics.py b/ee/api/chalicelib/core/product_analytics.py index 9e7aa23e5..d027c1da4 100644 --- a/ee/api/chalicelib/core/product_analytics.py +++ b/ee/api/chalicelib/core/product_analytics.py @@ -1,8 +1,8 @@ from typing import List import schemas -from chalicelib.core.metrics import __get_basic_constraints, __get_meta_constraint -from chalicelib.core.metrics import __get_constraint_values, __complete_missing_steps +from chalicelib.core.metrics_ch import __get_basic_constraints, __get_meta_constraint +from chalicelib.core.metrics_ch import __get_constraint_values, __complete_missing_steps from chalicelib.utils import ch_client, exp_ch_helper from chalicelib.utils import helper, dev from chalicelib.utils.TimeUTC import TimeUTC diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py index ca1bf9812..5d92fbbc6 100644 --- a/ee/api/chalicelib/core/roles.py +++ b/ee/api/chalicelib/core/roles.py @@ -182,3 +182,20 @@ def delete(tenant_id, user_id, role_id): {"tenant_id": tenant_id, "role_id": role_id}) cur.execute(query=query) return get_roles(tenant_id=tenant_id) + + +def get_role(tenant_id, role_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify("""SELECT roles.* + FROM public.roles + WHERE tenant_id =%(tenant_id)s + AND deleted_at IS NULL + AND not service_role + AND role_id = %(role_id)s + LIMIT 1;""", + {"tenant_id": tenant_id, "role_id": role_id}) + cur.execute(query=query) + row = cur.fetchone() + if row is not None: + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + return helper.dict_to_camel_case(row) diff --git a/ee/api/chalicelib/core/sessions/__init__.py b/ee/api/chalicelib/core/sessions/__init__.py new file mode 100644 index 000000000..0d26b2876 --- /dev/null +++ 
b/ee/api/chalicelib/core/sessions/__init__.py @@ -0,0 +1,12 @@ +import logging + +from decouple import config + +logger = logging.getLogger(__name__) +from . import sessions as sessions_legacy + +if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): + logger.info(">>> Using experimental sessions search") + from . import sessions_ch as sessions +else: + from . import sessions diff --git a/ee/api/chalicelib/core/sessions_devtool.py b/ee/api/chalicelib/core/sessions/sessions_devtool.py similarity index 100% rename from ee/api/chalicelib/core/sessions_devtool.py rename to ee/api/chalicelib/core/sessions/sessions_devtool.py diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions/sessions_favorite.py similarity index 100% rename from ee/api/chalicelib/core/sessions_favorite.py rename to ee/api/chalicelib/core/sessions/sessions_favorite.py diff --git a/ee/api/chalicelib/core/sessions_favorite_exp.py b/ee/api/chalicelib/core/sessions/sessions_favorite_exp.py similarity index 100% rename from ee/api/chalicelib/core/sessions_favorite_exp.py rename to ee/api/chalicelib/core/sessions/sessions_favorite_exp.py diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions/sessions_notes.py similarity index 100% rename from ee/api/chalicelib/core/sessions_notes.py rename to ee/api/chalicelib/core/sessions/sessions_notes.py diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions/sessions_replay.py similarity index 100% rename from ee/api/chalicelib/core/sessions_replay.py rename to ee/api/chalicelib/core/sessions/sessions_replay.py diff --git a/ee/api/chalicelib/core/sessions_viewed.py b/ee/api/chalicelib/core/sessions/sessions_viewed.py similarity index 100% rename from ee/api/chalicelib/core/sessions_viewed.py rename to ee/api/chalicelib/core/sessions/sessions_viewed.py diff --git a/ee/api/chalicelib/core/sessions_viewed_exp.py b/ee/api/chalicelib/core/sessions/sessions_viewed_exp.py similarity index 100% rename from ee/api/chalicelib/core/sessions_viewed_exp.py rename to ee/api/chalicelib/core/sessions/sessions_viewed_exp.py diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py deleted file mode 100644 index c1b7d00a3..000000000 --- a/ee/api/chalicelib/core/sessions_insights.py +++ /dev/null @@ -1,467 +0,0 @@ -from typing import Optional -import logging -import schemas -from chalicelib.core import metrics -from chalicelib.core import sessions_exp -from chalicelib.utils import ch_client - -logger = logging.getLogger(__name__) - - -def _table_slice(table, index): - col = list() - for row in table: - col.append(row[index]) - return col - - -def _table_where(table, index, value): - new_table = list() - for row in table: - if row[index] == value: - new_table.append(row) - return new_table - - -def _sum_table_index(table, index): - s = 0 - count = 0 - for row in table: - v = row[index] - if v is None: - continue - s += v - count += 1 - return s - - -def _mean_table_index(table, index): - s = _sum_table_index(table, index) - c = len(table) - return s / c - - -def _sort_table_index(table, index, reverse=False): - return sorted(table, key=lambda k: k[index], reverse=reverse) - - -def _select_rec(l, selector): - if len(selector) == 1: - return l[selector[0]] - else: - s = selector[0] - L = l[s] - type_ = type(s) - if type_ == slice: - return [_select_rec(l_, selector[1:]) for l_ in L] - elif type_ == int: - return [_select_rec(L, selector[1:])] - - -def 
__get_two_values(response, time_index='hh', name_index='name'): - columns = list(response[0].keys()) - name_index_val = columns.index(name_index) - time_index_value = columns.index(time_index) - - table = [list(r.values()) for r in response] - table_hh1 = list() - table_hh2 = list() - hh_vals = list() - names_hh1 = list() - names_hh2 = list() - for e in table: - if e[time_index_value] not in hh_vals and len(hh_vals) == 2: - break - elif e[time_index_value] not in hh_vals: - hh_vals.append(e[time_index_value]) - - if len(hh_vals) == 1: - table_hh1.append(e) - if e[name_index_val] not in names_hh1: - names_hh1.append(e[name_index_val]) - elif len(hh_vals) == 2: - table_hh2.append(e) - if e[name_index_val] not in names_hh2: - names_hh2.append(e[name_index_val]) - return table_hh1, table_hh2, columns, names_hh1, names_hh2 - - -def query_requests_by_period(project_id, start_time, end_time, filters: Optional[schemas.SessionsSearchPayloadSchema]): - params = { - "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time, - "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3) - } - params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params) - conditions = ["event_type = 'REQUEST'"] - query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start, - toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end - SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.success) as success_rate, T2.url_host as names, - T2.url_path as source, avg(T2.duration) as avg_duration - FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1 - LEFT JOIN (SELECT session_id, url_host, url_path, success, message, duration, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime - FROM experimental.events - {sub_query} - WHERE project_id = {project_id} - AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh - GROUP BY T1.hh, T2.url_host, T2.url_path - ORDER BY T1.hh DESC;""" - with ch_client.ClickHouseClient() as conn: - query = conn.format(query=query, params=params) - logging.debug("--------") - logging.debug(query) - logging.debug("--------") - res = conn.execute(query=query) - if res is None or sum([r.get("sessions") for r in res]) == 0: - return [] - - table_hh1, table_hh2, columns, this_period_hosts, last_period_hosts = __get_two_values(res, time_index='hh', - name_index='source') - test = [k[4] for k in table_hh1] - del res - - new_hosts = [x for x in this_period_hosts if x not in last_period_hosts] - common_names = [x for x in this_period_hosts if x not in new_hosts] - - source_idx = columns.index('source') - duration_idx = columns.index('avg_duration') - # success_idx = columns.index('success_rate') - # delta_duration = dict() - # delta_success = dict() - new_duration_values = dict() - duration_values = dict() - for n in common_names: - d1_tmp = _table_where(table_hh1, source_idx, n) - d2_tmp = _table_where(table_hh2, source_idx, n) - old_duration = _mean_table_index(d2_tmp, duration_idx) - new_duration = _mean_table_index(d1_tmp, duration_idx) - if old_duration == 0: - continue - duration_values[n] = new_duration, old_duration, (new_duration - old_duration) / old_duration - # delta_duration[n] = (_mean_table_index(d1_tmp, duration_idx) - _duration1) / _duration1 - # delta_success[n] = _mean_table_index(d1_tmp, success_idx) - 
_mean_table_index(d2_tmp, success_idx) - for n in new_hosts: - d1_tmp = _table_where(table_hh1, source_idx, n) - new_duration_values[n] = _mean_table_index(d1_tmp, duration_idx) - - # names_idx = columns.index('names') - total = _sum_table_index(table_hh1, duration_idx) - d1_tmp = _sort_table_index(table_hh1, duration_idx, reverse=True) - _tmp = _table_slice(d1_tmp, duration_idx) - _tmp2 = _table_slice(d1_tmp, source_idx) - - increase = sorted(duration_values.items(), key=lambda k: k[1][-1], reverse=True) - ratio = sorted(zip(_tmp2, _tmp), key=lambda k: k[1], reverse=True) - # names_ = set([k[0] for k in increase[:3]+ratio[:3]]+new_hosts[:3]) - names_ = set([k[0] for k in increase[:3] + ratio[:3]]) # we took out new hosts since they dont give much info - - results = list() - for n in names_: - if n is None: - continue - data_ = {'category': schemas.InsightCategories.NETWORK, 'name': n, - 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} - for n_, v in ratio: - if n == n_: - if n in new_hosts: - data_['value'] = new_duration_values[n] - data_['ratio'] = 100 * v / total - break - for n_, v in increase: - if n == n_: - data_['value'] = v[0] - data_['oldValue'] = v[1] - data_['change'] = 100 * v[2] - data_['isNew'] = False - break - results.append(data_) - return results - - -def __filter_subquery(project_id: int, filters: Optional[schemas.SessionsSearchPayloadSchema], params: dict): - sub_query = "" - if filters and (len(filters.events) > 0 or len(filters.filters)) > 0: - qp_params, sub_query = sessions_exp.search_query_parts_ch(data=filters, project_id=project_id, - error_status=None, - errors_only=True, favorite_only=None, - issue=None, user_id=None) - params = {**params, **qp_params} - # TODO: test if this line impacts other cards beside insights - # sub_query = f"INNER JOIN {sub_query} USING(session_id)" - return params, sub_query - - -def query_most_errors_by_period(project_id, start_time, end_time, - filters: Optional[schemas.SessionsSearchPayloadSchema]): - params = { - "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time, - "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3) - } - params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params) - conditions = ["event_type = 'ERROR'"] - query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start, - toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end - SELECT T1.hh, countIf(T2.session_id != 0) as sessions, T2.message_name as names, - groupUniqArray(T2.source) as sources - FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1 - LEFT JOIN (SELECT session_id, concat(name,': ', message) as message_name, source, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime - FROM experimental.events - {sub_query} - WHERE project_id = {project_id} - AND datetime >= toDateTime(%(startTimestamp)s/1000) - AND datetime < toDateTime(%(endTimestamp)s/1000) - AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh - GROUP BY T1.hh, T2.message_name - ORDER BY T1.hh DESC;""" - - with ch_client.ClickHouseClient() as conn: - query = conn.format(query=query, params=params) - logging.debug("--------") - logging.debug(query) - logging.debug("--------") - res = conn.execute(query=query) - if res is None or sum([r.get("sessions") for r in res]) == 0: - return [] - - 
table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh', - name_index='names') - del res - new_errors = [x for x in this_period_errors if x not in last_period_errors] - common_errors = [x for x in this_period_errors if x not in new_errors] - - sessions_idx = columns.index('sessions') - names_idx = columns.index('names') - - percentage_errors = dict() - total = _sum_table_index(table_hh1, sessions_idx) - # error_increase = dict() - new_error_values = dict() - error_values = dict() - for n in this_period_errors: - if n is None: - continue - percentage_errors[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - for n in common_errors: - if n is None: - continue - sum_old_errors = _sum_table_index(_table_where(table_hh2, names_idx, n), sessions_idx) - if sum_old_errors == 0: - continue - sum_new_errors = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - # error_increase[n] = (new_errors - old_errors) / old_errors - error_values[n] = sum_new_errors, sum_old_errors, (sum_new_errors - sum_old_errors) / sum_old_errors - ratio = sorted(percentage_errors.items(), key=lambda k: k[1], reverse=True) - increase = sorted(error_values.items(), key=lambda k: k[1][-1], reverse=True) - names_ = set([k[0] for k in increase[:3] + ratio[:3]] + new_errors[:3]) - - results = list() - for n in names_: - if n is None: - continue - data_ = {'category': schemas.InsightCategories.ERRORS, 'name': n, - 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} - for n_, v in ratio: - if n == n_: - if n in new_errors: - data_['value'] = new_error_values[n] - data_['ratio'] = 100 * v / total - break - for n_, v in increase: - if n == n_: - data_['value'] = v[0] - data_['oldValue'] = v[1] - data_['change'] = 100 * v[2] - data_['isNew'] = False - break - results.append(data_) - return results - - -def query_cpu_memory_by_period(project_id, start_time, end_time, - filters: Optional[schemas.SessionsSearchPayloadSchema]): - params = { - "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time, - "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3) - } - params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params) - conditions = ["event_type = 'PERFORMANCE'"] - query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start, - toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end - SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.avg_cpu) as cpu_used, - avg(T2.avg_used_js_heap_size) as memory_used, T2.url_host as names, groupUniqArray(T2.url_path) as sources - FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1 - LEFT JOIN (SELECT session_id, url_host, url_path, avg_used_js_heap_size, avg_cpu, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime - FROM experimental.events - {sub_query} - WHERE project_id = {project_id} - AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh - GROUP BY T1.hh, T2.url_host - ORDER BY T1.hh DESC;""" - with ch_client.ClickHouseClient() as conn: - query = conn.format(query=query, params=params) - logging.debug("--------") - logging.debug(query) - logging.debug("--------") - res = conn.execute(query=query) - if 
res is None or sum([r.get("sessions") for r in res]) == 0: - return [] - - table_hh1, table_hh2, columns, this_period_resources, last_period_resources = __get_two_values(res, time_index='hh', - name_index='names') - - logging.debug(f'TB1\n{table_hh1}') - logging.debug(f'TB2\n{table_hh2}') - del res - - memory_idx = columns.index('memory_used') - cpu_idx = columns.index('cpu_used') - - mem_newvalue = _mean_table_index(table_hh1, memory_idx) - mem_oldvalue = _mean_table_index(table_hh2, memory_idx) - cpu_newvalue = _mean_table_index(table_hh2, cpu_idx) - cpu_oldvalue = _mean_table_index(table_hh2, cpu_idx) - - cpu_ratio = 0 - mem_ratio = 0 - if mem_newvalue == 0: - mem_newvalue = None - mem_ratio = None - if mem_oldvalue == 0: - mem_oldvalue = None - mem_ratio = None - if cpu_newvalue == 0: - cpu_newvalue = None - cpu_ratio = None - if cpu_oldvalue == 0: - cpu_oldvalue = None - cpu_ratio = None - - output = list() - if cpu_oldvalue is not None or cpu_newvalue is not None: - output.append({'category': schemas.InsightCategories.RESOURCES, - 'name': 'cpu', - 'value': cpu_newvalue, - 'oldValue': cpu_oldvalue, - 'change': 100 * ( - cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio, - 'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False}) - if mem_oldvalue is not None or mem_newvalue is not None: - output.append({'category': schemas.InsightCategories.RESOURCES, - 'name': 'memory', - 'value': mem_newvalue, - 'oldValue': mem_oldvalue, - 'change': 100 * ( - mem_newvalue - mem_oldvalue) / mem_oldvalue if mem_ratio is not None else mem_ratio, - 'isNew': True if mem_newvalue is not None and mem_oldvalue is None else False}) - return output - - -def query_click_rage_by_period(project_id, start_time, end_time, - filters: Optional[schemas.SessionsSearchPayloadSchema]): - params = { - "project_id": project_id, "startTimestamp": start_time, "endTimestamp": end_time, - "step_size": metrics.__get_step_size(endTimestamp=end_time, startTimestamp=start_time, density=3)} - params, sub_query = __filter_subquery(project_id=project_id, filters=filters, params=params) - conditions = ["issue_type = 'click_rage'", "event_type = 'ISSUE'"] - query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start, - toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end - SELECT T1.hh, countIf(T2.session_id != 0) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources - FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1 - LEFT JOIN (SELECT session_id, url_host, url_path, toStartOfInterval(datetime, INTERVAL %(step_size)s second ) as dtime - FROM experimental.events - {sub_query} - WHERE project_id = %(project_id)s - AND datetime >= toDateTime(%(startTimestamp)s/1000) - AND datetime < toDateTime(%(endTimestamp)s/1000) - AND {" AND ".join(conditions)}) AS T2 ON T2.dtime = T1.hh - GROUP BY T1.hh, T2.url_path - ORDER BY T1.hh DESC;""" - with ch_client.ClickHouseClient() as conn: - query = conn.format(query=query, params=params) - logging.debug("--------") - logging.debug(query) - logging.debug("--------") - res = conn.execute(query=query) - if res is None or sum([r.get("sessions") for r in res]) == 0: - return [] - - table_hh1, table_hh2, columns, this_period_rage, last_period_rage = __get_two_values(res, time_index='hh', - name_index='sources') - del res - - new_names = [x for x in this_period_rage if x not 
in last_period_rage] - common_names = [x for x in this_period_rage if x not in new_names] - - sessions_idx = columns.index('sessions') - names_idx = columns.index('sources') - - # raged_increment = dict() - raged_values = dict() - new_raged_values = dict() - # TODO verify line (188) _tmp = table_hh2[:, sessions_idx][n].sum() - for n in common_names: - if n is None: - continue - _oldvalue = _sum_table_index(_table_where(table_hh2, names_idx, n), sessions_idx) - _newvalue = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - # raged_increment[n] = (_newvalue - _oldvalue) / _oldvalue - raged_values[n] = _newvalue, _oldvalue, (_newvalue - _oldvalue) / _oldvalue - - for n in new_names: - if n is None: - continue - _newvalue = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - new_raged_values[n] = _newvalue - - total = _sum_table_index(table_hh1, sessions_idx) - names, ratio = _table_slice(table_hh1, names_idx), _table_slice(table_hh1, sessions_idx) - ratio = sorted(zip(names, ratio), key=lambda k: k[1], reverse=True) - increase = sorted(raged_values.items(), key=lambda k: k[1][-1], reverse=True) - names_ = set([k[0] for k in increase[:3] + ratio[:3]] + new_names[:3]) - - results = list() - for n in names_: - if n is None: - continue - data_ = {'category': schemas.InsightCategories.RAGE, 'name': n, - 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} - for n_, v in ratio: - if n == n_: - if n in new_names: - data_['value'] = new_raged_values[n] - data_['ratio'] = 100 * v / total - break - for n_, v in increase: - if n == n_: - data_['value'] = v[0] - data_['oldValue'] = v[1] - data_['change'] = 100 * v[2] - data_['isNew'] = False - break - results.append(data_) - return results - - -def fetch_selected(project_id, data: schemas.GetInsightsSchema): - output = list() - if data.metricValue is None or len(data.metricValue) == 0: - data.metricValue = [] - for v in schemas.InsightCategories: - data.metricValue.append(v) - filters = None - if len(data.series) > 0: - filters = data.series[0].filter - - if schemas.InsightCategories.ERRORS in data.metricValue: - output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp, - end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.NETWORK in data.metricValue: - output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp, - end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.RAGE in data.metricValue: - output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp, - end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.RESOURCES in data.metricValue: - output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp, - end_time=data.endTimestamp, filters=filters) - return output diff --git a/ee/api/chalicelib/core/sessions_metas.py b/ee/api/chalicelib/core/sessions_metas.py deleted file mode 100644 index 97907768b..000000000 --- a/ee/api/chalicelib/core/sessions_metas.py +++ /dev/null @@ -1,82 +0,0 @@ -import schemas -from chalicelib.utils.event_filter_definition import SupportedFilter -from decouple import config - -if config("EXP_AUTOCOMPLETE", cast=bool, default=False): - from . import autocomplete_exp as autocomplete -else: - from . 
import autocomplete as autocomplete - -SUPPORTED_TYPES = { - schemas.FilterType.USER_OS: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS)), - schemas.FilterType.USER_BROWSER: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER)), - schemas.FilterType.USER_DEVICE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE)), - schemas.FilterType.USER_COUNTRY: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY)), - schemas.FilterType.USER_CITY: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY)), - schemas.FilterType.USER_STATE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE)), - schemas.FilterType.USER_ID: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID)), - schemas.FilterType.USER_ANONYMOUS_ID: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID)), - schemas.FilterType.REV_ID: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID)), - schemas.FilterType.REFERRER: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER)), - schemas.FilterType.UTM_CAMPAIGN: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN)), - schemas.FilterType.UTM_MEDIUM: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM)), - schemas.FilterType.UTM_SOURCE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)), - # MOBILE - schemas.FilterType.USER_OS_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)), - schemas.FilterType.USER_DEVICE_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas( - typename=schemas.FilterType.USER_DEVICE_MOBILE), - 
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE_MOBILE)), - schemas.FilterType.USER_COUNTRY_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE)), - schemas.FilterType.USER_ID_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE)), - schemas.FilterType.USER_ANONYMOUS_ID_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE)), - schemas.FilterType.REV_ID_MOBILE: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE)), - -} - - -def search(text: str, meta_type: schemas.FilterType, project_id: int): - rows = [] - if meta_type not in list(SUPPORTED_TYPES.keys()): - return {"errors": ["unsupported type"]} - rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text) - # for IOS events autocomplete - # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()): - # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text) - return {"data": rows} diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 721715a55..ee76150ff 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -199,6 +199,12 @@ def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, backgrou role_id = data.roleId if role_id is None: role_id = roles.get_role_by_name(tenant_id=tenant_id, name="member").get("roleId") + else: + role = roles.get_role(tenant_id=tenant_id, role_id=role_id) + if role is None: + return {"errors": ["role not found"]} + if role["name"].lower() == "owner" and role["protected"]: + return {"errors": ["invalid role"]} invitation_token = __generate_invitation_token() user = get_deleted_user_by_email(email=data.email) if user is not None and user["tenantId"] == tenant_id: @@ -333,7 +339,7 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, if editor_id != user_id_to_update: admin = get_user_role(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: - return {"errors": ["unauthorized"]} + return {"errors": ["unauthorized, you must have admin privileges"]} if admin["admin"] and user["superAdmin"]: return {"errors": ["only the owner can edit his own details"]} else: @@ -343,10 +349,10 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, return {"errors": ["cannot change your own admin privileges"]} if changes.roleId: if user["superAdmin"] and changes.roleId != user["roleId"]: - changes.roleId = None return {"errors": ["owner's role cannot be changed"]} - - if changes.roleId != user["roleId"]: + elif user["superAdmin"]: + changes.roleId = None + elif changes.roleId != user["roleId"]: return {"errors": ["cannot change your own role"]} if changes.name and len(changes.name) > 0: @@ -357,6 +363,12 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, if changes.roleId is not None: _changes["roleId"] = 
changes.roleId + role = roles.get_role(tenant_id=tenant_id, role_id=changes.roleId) + if role is None: + return {"errors": ["role not found"]} + else: + if role["name"].lower() == "owner" and role["protected"]: + return {"errors": ["invalid role"]} if len(_changes.keys()) > 0: update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes, output=False) @@ -540,12 +552,6 @@ def set_password_invitation(tenant_id, user_id, new_password): user = update(tenant_id=tenant_id, user_id=user_id, changes=changes) r = authenticate(user['email'], new_password) - tenant_id = r.pop("tenantId") - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - return { "jwt": r.pop("jwt"), "refreshToken": r.pop("refreshToken"), @@ -554,10 +560,7 @@ def set_password_invitation(tenant_id, user_id, new_password): "spotRefreshToken": r.pop("spotRefreshToken"), "spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"), "tenantId": tenant_id, - 'data': { - "scopeState": scope.get_scope(tenant_id), - "user": r - } + **r } diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py index 548c5769b..b7f8b644a 100644 --- a/ee/api/chalicelib/core/webhook.py +++ b/ee/api/chalicelib/core/webhook.py @@ -136,13 +136,13 @@ def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") if data.webhook_id is not None: return update(tenant_id=tenant_id, webhook_id=data.webhook_id, - changes={"endpoint": data.endpoint.unicode_string(), + changes={"endpoint": data.endpoint, "authHeader": data.auth_header, "name": data.name}, replace_none=replace_none) else: return add(tenant_id=tenant_id, - endpoint=data.endpoint.unicode_string(), + endpoint=data.endpoint, auth_header=data.auth_header, name=data.name, replace_none=replace_none) diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index b952bc94b..e4148863f 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -6,46 +6,35 @@ rm -rf ./auth/auth_apikey.py rm -rf ./build.sh rm -rf ./build_alerts.sh rm -rf ./build_crons.sh -rm -rf ./chalicelib/core/alerts.py rm -rf ./chalicelib/core/announcements.py rm -rf ./chalicelib/core/assist.py rm -rf ./chalicelib/core/authorizers.py -rm -rf ./chalicelib/core/autocomplete.py -rm -rf ./chalicelib/core/collaboration_base.py -rm -rf ./chalicelib/core/collaboration_msteams.py -rm -rf ./chalicelib/core/collaboration_slack.py +rm -rf ./chalicelib/core/autocomplete +rm -rf ./chalicelib/core/collaborations rm -rf ./chalicelib/core/countries.py +rm -rf ./chalicelib/core/metrics.py +rm -rf ./chalicelib/core/custom_metrics.py rm -rf ./chalicelib/core/custom_metrics_predefined.py rm -rf ./chalicelib/core/dashboards.py rm -rf ./chalicelib/core/errors_favorite.py rm -rf ./chalicelib/core/events_mobile.py rm -rf ./chalicelib/core/feature_flags.py rm -rf ./chalicelib/core/funnels.py -rm -rf ./chalicelib/core/integration_base.py -rm -rf ./chalicelib/core/integration_base_issue.py -rm -rf ./chalicelib/core/integration_github.py -rm -rf ./chalicelib/core/integration_github_issue.py -rm -rf ./chalicelib/core/integration_jira_cloud.py -rm -rf ./chalicelib/core/integration_jira_cloud_issue.py +rm -rf ./chalicelib/core/issue_tracking/*.py rm -rf ./chalicelib/core/integrations_manager.py rm -rf ./chalicelib/core/issues.py rm -rf ./chalicelib/core/jobs.py -rm -rf ./chalicelib/core/log_tool_bugsnag.py -rm -rf ./chalicelib/core/log_tool_cloudwatch.py -rm -rf 
./chalicelib/core/log_tool_datadog.py -rm -rf ./chalicelib/core/log_tool_elasticsearch.py -rm -rf ./chalicelib/core/log_tool_newrelic.py -rm -rf ./chalicelib/core/log_tool_rollbar.py -rm -rf ./chalicelib/core/log_tool_sentry.py -rm -rf ./chalicelib/core/log_tool_stackdriver.py -rm -rf ./chalicelib/core/log_tool_sumologic.py +rm -rf ./chalicelib/core/log_tools/*.py rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py -rm -rf ./chalicelib/core/performance_event.py rm -rf ./chalicelib/core/saved_search.py -rm -rf ./chalicelib/core/sessions.py -rm -rf ./chalicelib/core/sessions_assignments.py -rm -rf ./chalicelib/core/sessions_mobs.py +rm -rf ./chalicelib/core/sessions/sessions.py +rm -rf ./chalicelib/core/sessions/sessions_ch.py +rm -rf ./chalicelib/core/sessions/sessions_assignments.py +rm -rf ./chalicelib/core/sessions/sessions_metas.py +rm -rf ./chalicelib/core/sessions/sessions_mobs.py +rm -rf ./chalicelib/core/sessions/performance_event.py +rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py rm -rf ./chalicelib/core/significance.py rm -rf ./chalicelib/core/socket_ios.py rm -rf ./chalicelib/core/sourcemaps.py @@ -97,4 +86,13 @@ rm -rf ./chalicelib/core/db_request_handler.py rm -rf ./chalicelib/utils/or_cache rm -rf ./routers/subs/health.py rm -rf ./chalicelib/core/spot.py -rm -rf ./chalicelib/core/unprocessed_sessions.py \ No newline at end of file +rm -rf ./chalicelib/core/product_anaytics2.py +rm -rf ./chalicelib/utils/ch_client.py +rm -rf ./chalicelib/utils/ch_client_exp.py +rm -rf ./routers/subs/product_anaytics.py +rm -rf ./chalicelib/core/alerts/__init__.py +rm -rf ./chalicelib/core/alerts/alerts.py +rm -rf ./chalicelib/core/alerts/alerts_processor.py +rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py +rm -rf ./chalicelib/core/alerts/alerts_listener.py +rm -rf ./chalicelib/core/alerts/modules/helpers.py diff --git a/ee/api/env.default b/ee/api/env.default index 460062047..527bb56e3 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -11,7 +11,8 @@ captcha_key= captcha_server= CH_COMPRESSION=true ch_host= -ch_port= +ch_port=9000 +ch_port_http=8123 ch_receive_timeout=10 ch_timeout=30 change_password_link=/reset-password?invitation=%s&&pass=%s @@ -83,4 +84,5 @@ SITE_URL= sourcemaps_bucket=sourcemaps sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/sourcemaps/{}/sourcemaps TRACE_PERIOD=300 -TZ=UTC \ No newline at end of file +TZ=UTC +EXP_CH_DRIVER=true \ No newline at end of file diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index 9b7fc1a9d..adfc42bba 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -1,21 +1,21 @@ -# Keep this version to not have conflicts between requests and boto3 -urllib3==1.26.16 +urllib3==2.2.3 requests==2.32.3 -boto3==1.35.60 -pyjwt==2.9.0 +boto3==1.35.76 +pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 +clickhouse-driver[lz4]==0.2.9 +clickhouse-connect==0.8.9 elasticsearch==8.16.0 jira==3.8.0 cachetools==5.5.0 -fastapi==0.115.5 -uvicorn[standard]==0.32.0 +fastapi==0.115.6 +uvicorn[standard]==0.32.1 python-decouple==3.8 -pydantic[email]==2.9.2 -apscheduler==3.10.4 +pydantic[email]==2.10.3 +apscheduler==3.11.0 -clickhouse-driver[lz4]==0.2.9 -azure-storage-blob==12.23.1 \ No newline at end of file +azure-storage-blob==12.24.0 \ No newline at end of file diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt index 0d4a49ed4..68c115f1c 100644 --- a/ee/api/requirements-crons.txt +++ 
b/ee/api/requirements-crons.txt @@ -1,21 +1,21 @@ -# Keep this version to not have conflicts between requests and boto3 -urllib3==1.26.16 +urllib3==2.2.3 requests==2.32.3 -boto3==1.35.60 -pyjwt==2.9.0 +boto3==1.35.76 +pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 +clickhouse-driver[lz4]==0.2.9 +clickhouse-connect==0.8.9 elasticsearch==8.16.0 jira==3.8.0 cachetools==5.5.0 -fastapi==0.115.5 +fastapi==0.115.6 python-decouple==3.8 -pydantic[email]==2.9.2 -apscheduler==3.10.4 +pydantic[email]==2.10.3 +apscheduler==3.11.0 -clickhouse-driver[lz4]==0.2.9 redis==5.2.0 -azure-storage-blob==12.23.1 +azure-storage-blob==12.24.0 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 0ab8a96b4..538364050 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,30 +1,31 @@ -# Keep this version to not have conflicts between requests and boto3 -urllib3==1.26.16 +urllib3==2.2.3 requests==2.32.3 -boto3==1.35.60 -pyjwt==2.9.0 +boto3==1.35.76 +pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 +clickhouse-driver[lz4]==0.2.9 +clickhouse-connect==0.8.9 elasticsearch==8.16.0 jira==3.8.0 cachetools==5.5.0 -fastapi==0.115.5 -uvicorn[standard]==0.32.0 +fastapi==0.115.6 +uvicorn[standard]==0.32.1 gunicorn==23.0.0 python-decouple==3.8 -pydantic[email]==2.9.2 -apscheduler==3.10.4 +pydantic[email]==2.10.3 +apscheduler==3.11.0 + +redis==5.2.1 -clickhouse-driver[lz4]==0.2.9 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml python3-saml==1.16.0 --no-binary=lxml -python-multipart==0.0.17 +python-multipart==0.0.18 -redis==5.2.0 #confluent-kafka==2.1.0 -azure-storage-blob==12.23.1 +azure-storage-blob==12.24.0 diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index 43192e63a..2b8ba0f37 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,7 +1,7 @@ from typing import Optional from chalicelib.core import roles, traces, assist_records, sessions -from chalicelib.core import sessions_insights, assist_stats +from chalicelib.core import assist_stats from chalicelib.core import unlock, signals from chalicelib.utils import assist_helper @@ -132,13 +132,6 @@ def send_interactions(projectId: int, data: schemas.SignalsSchema = Body(...), return {'data': data} -@app.post('/{projectId}/dashboard/insights', tags=["insights"]) -@app.post('/{projectId}/dashboard/insights', tags=["insights"]) -def sessions_search(projectId: int, data: schemas.GetInsightsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {'data': sessions_insights.fetch_selected(data=data, project_id=projectId)} - - @public_app.get('/{project_id}/assist-stats/avg', tags=["assist-stats"]) def get_assist_stats_avg( project_id: int, diff --git a/ee/api/schemas/schemas_ee.py b/ee/api/schemas/schemas_ee.py index 1d2197895..a5cc5c78f 100644 --- a/ee/api/schemas/schemas_ee.py +++ b/ee/api/schemas/schemas_ee.py @@ -61,20 +61,6 @@ class SignalsSchema(BaseModel): data: dict = Field(default={}) -class InsightCategories(str, Enum): - ERRORS = "errors" - NETWORK = "network" - RAGE = "rage" - RESOURCES = "resources" - - -class GetInsightsSchema(schemas._TimedSchema): - startTimestamp: int = Field(default=TimeUTC.now(-7)) - endTimestamp: int = Field(default=TimeUTC.now()) - metricValue: List[InsightCategories] = Field(default=[]) - series: List[schemas.CardSeriesSchema] = Field(default=[]) - - class CreateMemberSchema(schemas.CreateMemberSchema): 
roleId: Optional[int] = Field(default=None) @@ -150,15 +136,3 @@ class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSc user_id: Optional[int] = Field(default=None) query: Optional[str] = Field(default=None) order: Literal["asc", "desc"] = Field(default="desc") - - -# TODO: move these to schema when Insights is supported on PG -class CardInsights(schemas.CardInsights): - metric_value: List[InsightCategories] = Field(default=[]) - - @model_validator(mode="after") - def restrictions(self): - return self - - -CardSchema = ORUnion(Union[schemas.__cards_union_base, CardInsights], discriminator='metric_type') diff --git a/ee/backend/internal/db/datasaver/fts.go b/ee/backend/internal/db/datasaver/fts.go index 34f75b006..15f0fd1e9 100644 --- a/ee/backend/internal/db/datasaver/fts.go +++ b/ee/backend/internal/db/datasaver/fts.go @@ -3,7 +3,9 @@ package datasaver import ( "encoding/json" "log" + "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/queue" ) type NetworkRequestFTS struct { @@ -98,6 +100,12 @@ func WrapGraphQL(m *messages.GraphQL, projID uint32) *GraphQLFTS { } } +func (s *saverImpl) init() { + if s.cfg.UseQuickwit { + s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true) + } +} + func (s *saverImpl) sendToFTS(msg messages.Message, projID uint32) { // Skip, if FTS is disabled if s.producer == nil { diff --git a/ee/backend/internal/db/datasaver/methods.go b/ee/backend/internal/db/datasaver/methods.go deleted file mode 100644 index 1644a1fc0..000000000 --- a/ee/backend/internal/db/datasaver/methods.go +++ /dev/null @@ -1,93 +0,0 @@ -package datasaver - -import ( - "log" - - "openreplay/backend/pkg/db/clickhouse" - "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/env" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/queue" - "openreplay/backend/pkg/sessions" -) - -func (s *saverImpl) init() { - s.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING")) - if err := s.ch.Prepare(); err != nil { - log.Fatalf("can't prepare clickhouse: %s", err) - } - s.pg.SetClickHouse(s.ch) - if s.cfg.UseQuickwit { - s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true) - } -} - -func (s *saverImpl) handleExtraMessage(msg messages.Message) error { - // Get session data - var ( - session *sessions.Session - err error - ) - - if msg.TypeID() == messages.MsgSessionEnd || msg.TypeID() == messages.MsgMobileSessionEnd { - session, err = s.sessions.GetUpdated(msg.SessionID()) - } else { - session, err = s.sessions.Get(msg.SessionID()) - } - if err != nil || session == nil { - log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg) - return err - } - - // Send data to quickwit - s.sendToFTS(msg, session.ProjectID) - - // Handle message - switch m := msg.(type) { - case *messages.SessionEnd: - return s.ch.InsertWebSession(session) - case *messages.PerformanceTrackAggr: - return s.ch.InsertWebPerformanceTrackAggr(session, m) - case *messages.MouseClick: - return s.ch.InsertWebClickEvent(session, m) - // Unique for Web - case *messages.PageEvent: - return s.ch.InsertWebPageEvent(session, m) - case *messages.JSException: - wrapper, _ := types.WrapJSException(m) - return s.ch.InsertWebErrorEvent(session, wrapper) - case *messages.IntegrationEvent: - return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)) - case *messages.IssueEvent: - return s.ch.InsertIssue(session, m) - case *messages.CustomEvent: - return s.ch.InsertCustom(session, m) - case *messages.NetworkRequest: - if 
err := s.ch.InsertRequest(session, m, session.SaveRequestPayload); err != nil { - log.Printf("can't insert request event into clickhouse: %s", err) - } - case *messages.GraphQL: - return s.ch.InsertGraphQL(session, m) - case *messages.InputChange: - return s.ch.InsertWebInputDuration(session, m) - case *messages.MouseThrashing: - return s.ch.InsertMouseThrashing(session, m) - - // Mobile messages - case *messages.MobileSessionEnd: - return s.ch.InsertMobileSession(session) - case *messages.MobileEvent: - return s.ch.InsertMobileCustom(session, m) - case *messages.MobileClickEvent: - return s.ch.InsertMobileClick(session, m) - case *messages.MobileSwipeEvent: - return s.ch.InsertMobileSwipe(session, m) - case *messages.MobileInputEvent: - return s.ch.InsertMobileInput(session, m) - case *messages.MobileNetworkCall: - return s.ch.InsertMobileRequest(session, m, session.SaveRequestPayload) - case *messages.MobileCrash: - return s.ch.InsertMobileCrash(session, m) - } - return nil -} diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go deleted file mode 100644 index b61acd547..000000000 --- a/ee/backend/pkg/db/clickhouse/connector.go +++ /dev/null @@ -1,713 +0,0 @@ -package clickhouse - -import ( - "errors" - "fmt" - "github.com/ClickHouse/clickhouse-go/v2" - "github.com/ClickHouse/clickhouse-go/v2/lib/driver" - "log" - "openreplay/backend/pkg/db/types" - "openreplay/backend/pkg/hashid" - "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/sessions" - "openreplay/backend/pkg/url" - "os" - "strings" - "time" - - "openreplay/backend/pkg/license" -) - -type Connector interface { - Prepare() error - Commit() error - Stop() error - // Web - InsertWebSession(session *sessions.Session) error - InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error - InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error - InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error - InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error - InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error - InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error - InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error - InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error - InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error - InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error - InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error - // Mobile - InsertMobileSession(session *sessions.Session) error - InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error - InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error - InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error - InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error - InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error - InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error -} - -type task struct { - bulks []Bulk -} - -func NewTask() *task { - return &task{bulks: make([]Bulk, 0, 21)} -} - -type connectorImpl struct { - conn driver.Conn - batches map[string]Bulk //driver.Batch - workerTask chan *task - done chan struct{} - finished chan struct{} -} - -func 
getEnv(key, fallback string) string { - if value, ok := os.LookupEnv(key); ok { - return value - } - return fallback -} - -func NewConnector(url string) Connector { - license.CheckLicense() - url = strings.TrimPrefix(url, "tcp://") - url = strings.TrimSuffix(url, "/default") - userName := getEnv("CH_USERNAME", "default") - password := getEnv("CH_PASSWORD", "") - conn, err := clickhouse.Open(&clickhouse.Options{ - Addr: []string{url}, - Auth: clickhouse.Auth{ - Database: "default", - Username: userName, - Password: password, - }, - MaxOpenConns: 20, - MaxIdleConns: 15, - ConnMaxLifetime: 3 * time.Minute, - Compression: &clickhouse.Compression{ - Method: clickhouse.CompressionLZ4, - }, - }) - if err != nil { - log.Fatal(err) - } - - c := &connectorImpl{ - conn: conn, - batches: make(map[string]Bulk, 20), - workerTask: make(chan *task, 1), - done: make(chan struct{}), - finished: make(chan struct{}), - } - go c.worker() - return c -} - -func (c *connectorImpl) newBatch(name, query string) error { - batch, err := NewBulk(c.conn, name, query) - if err != nil { - return fmt.Errorf("can't create new batch: %s", err) - } - c.batches[name] = batch - return nil -} - -var batches = map[string]string{ - // Web - "sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?)", - "autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))", - "pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, url_path, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?)", - "clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type, selector, normalized_x, normalized_y, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))", - "inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", - "errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "performance": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, 
max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "requests": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type, transfer_size, url_path) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000))", - "custom": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", - "graphql": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", - "issuesEvents": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))", - "issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)", - //Mobile - "ios_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)", - "ios_custom": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", - "ios_clicks": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)", - "ios_swipes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, direction, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)", - "ios_inputs": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)", - "ios_requests": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)", - "ios_crashes": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, reason, stacktrace, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", -} - -func (c *connectorImpl) Prepare() error { - for table, query := range batches { - if err := c.newBatch(table, query); err != nil { - return fmt.Errorf("can't create %s batch: %s", table, err) - } - } - return nil -} - -func (c *connectorImpl) Commit() error { - newTask := NewTask() - for _, b := range c.batches { - newTask.bulks = append(newTask.bulks, b) - } - c.batches = make(map[string]Bulk, 20) - if err := c.Prepare(); err != nil { - log.Printf("can't prepare new CH batch set: %s", err) - } - c.workerTask <- newTask - return nil -} - -func (c *connectorImpl) Stop() error { - c.done <- struct{}{} - <-c.finished - return c.conn.Close() -} - 
-func (c *connectorImpl) sendBulks(t *task) { - for _, b := range t.bulks { - if err := b.Send(); err != nil { - log.Printf("can't send batch: %s", err) - } - } -} - -func (c *connectorImpl) worker() { - for { - select { - case t := <-c.workerTask: - c.sendBulks(t) - case <-c.done: - for t := range c.workerTask { - c.sendBulks(t) - } - c.finished <- struct{}{} - return - } - } -} - -func (c *connectorImpl) checkError(name string, err error) { - if err != clickhouse.ErrBatchAlreadySent { - log.Printf("can't create %s batch after failed append operation: %s", name, err) - } -} - -func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error { - if msg.Label == "" { - return nil - } - if err := c.batches["inputs"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Label, - "INPUT", - nullableUint16(uint16(msg.InputDuration)), - nullableUint32(uint32(msg.HesitationTime)), - ); err != nil { - c.checkError("inputs", err) - return fmt.Errorf("can't append to inputs batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error { - issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp) - // Insert issue event to batches - if err := c.batches["issuesEvents"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - issueID, - "mouse_thrashing", - "ISSUE", - msg.Url, - extractUrlPath(msg.Url), - ); err != nil { - c.checkError("issuesEvents", err) - return fmt.Errorf("can't append to issuesEvents batch: %s", err) - } - if err := c.batches["issues"].Append( - uint16(session.ProjectID), - issueID, - "mouse_thrashing", - msg.Url, - ); err != nil { - c.checkError("issues", err) - return fmt.Errorf("can't append to issues batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error { - issueID := hashid.IssueID(session.ProjectID, msg) - // Check issue type before insert to avoid panic from clickhouse lib - switch msg.Type { - case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing", "app_crash": - default: - return fmt.Errorf("unknown issueType: %s", msg.Type) - } - // Insert issue event to batches - if err := c.batches["issuesEvents"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), - issueID, - msg.Type, - "ISSUE", - msg.URL, - extractUrlPath(msg.URL), - ); err != nil { - c.checkError("issuesEvents", err) - return fmt.Errorf("can't append to issuesEvents batch: %s", err) - } - if err := c.batches["issues"].Append( - uint16(session.ProjectID), - issueID, - msg.Type, - msg.ContextString, - ); err != nil { - c.checkError("issues", err) - return fmt.Errorf("can't append to issues batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertWebSession(session *sessions.Session) error { - if session.Duration == nil { - return errors.New("trying to insert session with nil duration") - } - if err := c.batches["sessions"].Append( - session.SessionID, - uint16(session.ProjectID), - session.UserID, - session.UserUUID, - session.UserOS, - 
nullableString(session.UserOSVersion), - nullableString(session.UserDevice), - session.UserDeviceType, - session.UserCountry, - session.UserState, - session.UserCity, - datetime(session.Timestamp), - uint32(*session.Duration), - uint16(session.PagesCount), - uint16(session.EventsCount), - uint16(session.ErrorsCount), - uint32(session.IssueScore), - session.Referrer, - session.IssueTypes, - session.TrackerVersion, - session.UserBrowser, - nullableString(session.UserBrowserVersion), - session.Metadata1, - session.Metadata2, - session.Metadata3, - session.Metadata4, - session.Metadata5, - session.Metadata6, - session.Metadata7, - session.Metadata8, - session.Metadata9, - session.Metadata10, - session.Timezone, - session.UtmSource, - session.UtmMedium, - session.UtmCampaign, - ); err != nil { - c.checkError("sessions", err) - return fmt.Errorf("can't append to sessions batch: %s", err) - } - return nil -} - -func extractUrlPath(fullUrl string) string { - _, path, query, err := url.GetURLParts(fullUrl) - if err != nil { - log.Printf("can't parse url: %s", err) - return "" - } - pathQuery := path - if query != "" { - pathQuery += "?" + query - } - return strings.ToLower(pathQuery) -} - -func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error { - if err := c.batches["pages"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), - msg.URL, - nullableUint16(uint16(msg.RequestStart)), - nullableUint16(uint16(msg.ResponseStart)), - nullableUint16(uint16(msg.ResponseEnd)), - nullableUint16(uint16(msg.DomContentLoadedEventStart)), - nullableUint16(uint16(msg.DomContentLoadedEventEnd)), - nullableUint16(uint16(msg.LoadEventStart)), - nullableUint16(uint16(msg.LoadEventEnd)), - nullableUint16(uint16(msg.FirstPaint)), - nullableUint16(uint16(msg.FirstContentfulPaint)), - nullableUint16(uint16(msg.SpeedIndex)), - nullableUint16(uint16(msg.VisuallyComplete)), - nullableUint16(uint16(msg.TimeToInteractive)), - extractUrlPath(msg.URL), - "LOCATION", - ); err != nil { - c.checkError("pages", err) - return fmt.Errorf("can't append to pages batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error { - if msg.Label == "" { - return nil - } - var nX *float32 = nil - var nY *float32 = nil - if msg.NormalizedX != 101 && msg.NormalizedY != 101 { - // To support previous versions of tracker - if msg.NormalizedX <= 100 && msg.NormalizedY <= 100 { - msg.NormalizedX *= 100 - msg.NormalizedY *= 100 - } - normalizedX := float32(msg.NormalizedX) / 100.0 - normalizedY := float32(msg.NormalizedY) / 100.0 - nXVal := normalizedX - nX = &nXVal - nYVal := normalizedY - nY = &nYVal - } - if err := c.batches["clicks"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Label, - nullableUint32(uint32(msg.HesitationTime)), - "CLICK", - msg.Selector, - nX, - nY, - msg.Url, - extractUrlPath(msg.Url), - ); err != nil { - c.checkError("clicks", err) - return fmt.Errorf("can't append to clicks batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error { - keys, values := make([]string, 0, len(msg.Tags)), make([]*string, 0, len(msg.Tags)) - for k, v := range msg.Tags { - keys = append(keys, k) - values = append(values, v) - } - // Check error source before insert to avoid panic from clickhouse lib - switch msg.Source { - case 
"js_exception", "bugsnag", "cloudwatch", "datadog", "elasticsearch", "newrelic", "rollbar", "sentry", "stackdriver", "sumologic": - default: - return fmt.Errorf("unknown error source: %s", msg.Source) - } - msgID, _ := msg.ID(session.ProjectID) - // Insert event to batch - if err := c.batches["errors"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), - msg.Source, - nullableString(msg.Name), - msg.Message, - msgID, - "ERROR", - keys, - values, - ); err != nil { - c.checkError("errors", err) - return fmt.Errorf("can't append to errors batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error { - var timestamp uint64 = (msg.TimestampStart + msg.TimestampEnd) / 2 - if err := c.batches["performance"].Append( - session.SessionID, - uint16(session.ProjectID), - uint64(0), // TODO: find messageID for performance events - datetime(timestamp), - nullableString(msg.Meta().Url), - uint8(msg.MinFPS), - uint8(msg.AvgFPS), - uint8(msg.MaxFPS), - uint8(msg.MinCPU), - uint8(msg.AvgCPU), - uint8(msg.MaxCPU), - msg.MinTotalJSHeapSize, - msg.AvgTotalJSHeapSize, - msg.MaxTotalJSHeapSize, - msg.MinUsedJSHeapSize, - msg.AvgUsedJSHeapSize, - msg.MaxUsedJSHeapSize, - "PERFORMANCE", - ); err != nil { - c.checkError("performance", err) - return fmt.Errorf("can't append to performance batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error { - if len(msgValue) == 0 { - return nil - } - if err := c.batches["autocompletes"].Append( - uint16(session.ProjectID), - msgType, - msgValue, - ); err != nil { - c.checkError("autocompletes", err) - return fmt.Errorf("can't append to autocompletes batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error { - urlMethod := url.EnsureMethod(msg.Method) - if urlMethod == "" { - return fmt.Errorf("can't parse http method. 
sess: %d, method: %s", session.SessionID, msg.Method) - } - var request, response *string - if savePayload { - request = &msg.Request - response = &msg.Response - } - if err := c.batches["requests"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.Meta().Index, - datetime(uint64(msg.Meta().Timestamp)), - msg.URL, - request, - response, - uint16(msg.Status), - url.EnsureMethod(msg.Method), - uint16(msg.Duration), - msg.Status < 400, - "REQUEST", - uint32(msg.TransferredBodySize), - extractUrlPath(msg.URL), - ); err != nil { - c.checkError("requests", err) - return fmt.Errorf("can't append to requests batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error { - if err := c.batches["custom"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.Meta().Index, - datetime(uint64(msg.Meta().Timestamp)), - msg.Name, - msg.Payload, - "CUSTOM", - ); err != nil { - c.checkError("custom", err) - return fmt.Errorf("can't append to custom batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error { - if err := c.batches["graphql"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.Meta().Index, - datetime(uint64(msg.Meta().Timestamp)), - msg.OperationName, - nullableString(msg.Variables), - nullableString(msg.Response), - "GRAPHQL", - ); err != nil { - c.checkError("graphql", err) - return fmt.Errorf("can't append to graphql batch: %s", err) - } - return nil -} - -// Mobile events - -func (c *connectorImpl) InsertMobileSession(session *sessions.Session) error { - if session.Duration == nil { - return errors.New("trying to insert mobile session with nil duration") - } - if err := c.batches["ios_sessions"].Append( - session.SessionID, - uint16(session.ProjectID), - session.UserID, - session.UserUUID, - session.UserOS, - nullableString(session.UserOSVersion), - nullableString(session.UserDevice), - session.UserDeviceType, - session.UserCountry, - session.UserState, - session.UserCity, - datetime(session.Timestamp), - uint32(*session.Duration), - uint16(session.PagesCount), - uint16(session.EventsCount), - uint16(session.ErrorsCount), - uint32(session.IssueScore), - session.Referrer, - session.IssueTypes, - session.TrackerVersion, - session.UserBrowser, - nullableString(session.UserBrowserVersion), - session.Metadata1, - session.Metadata2, - session.Metadata3, - session.Metadata4, - session.Metadata5, - session.Metadata6, - session.Metadata7, - session.Metadata8, - session.Metadata9, - session.Metadata10, - "ios", - session.Timezone, - ); err != nil { - c.checkError("ios_sessions", err) - return fmt.Errorf("can't append to sessions batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error { - if err := c.batches["ios_custom"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.Meta().Index, - datetime(uint64(msg.Meta().Timestamp)), - msg.Name, - msg.Payload, - "CUSTOM", - ); err != nil { - c.checkError("ios_custom", err) - return fmt.Errorf("can't append to mobile custom batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error { - if msg.Label == "" { - return nil - } - if err := c.batches["ios_clicks"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Label, - "TAP", - ); err != 
nil { - c.checkError("ios_clicks", err) - return fmt.Errorf("can't append to mobile clicks batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error { - if msg.Label == "" { - return nil - } - if err := c.batches["ios_swipes"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Label, - nullableString(msg.Direction), - "SWIPE", - ); err != nil { - c.checkError("ios_clicks", err) - return fmt.Errorf("can't append to mobile clicks batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error { - if msg.Label == "" { - return nil - } - if err := c.batches["ios_inputs"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Label, - "INPUT", - ); err != nil { - c.checkError("ios_inputs", err) - return fmt.Errorf("can't append to mobile inputs batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error { - urlMethod := url.EnsureMethod(msg.Method) - if urlMethod == "" { - return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method) - } - var request, response *string - if savePayload { - request = &msg.Request - response = &msg.Response - } - if err := c.batches["ios_requests"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.Meta().Index, - datetime(uint64(msg.Meta().Timestamp)), - msg.URL, - request, - response, - uint16(msg.Status), - url.EnsureMethod(msg.Method), - uint16(msg.Duration), - msg.Status < 400, - "REQUEST", - ); err != nil { - c.checkError("ios_requests", err) - return fmt.Errorf("can't append to mobile requests batch: %s", err) - } - return nil -} - -func (c *connectorImpl) InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error { - if err := c.batches["ios_crashes"].Append( - session.SessionID, - uint16(session.ProjectID), - msg.MsgID(), - datetime(msg.Timestamp), - msg.Name, - msg.Reason, - msg.Stacktrace, - "CRASH", - ); err != nil { - c.checkError("ios_crashes", err) - return fmt.Errorf("can't append to mobile crashges batch: %s", err) - } - return nil -} diff --git a/ee/backend/pkg/spot/auth/authorizer.go b/ee/backend/pkg/server/auth/authorizer.go similarity index 83% rename from ee/backend/pkg/spot/auth/authorizer.go rename to ee/backend/pkg/server/auth/authorizer.go index 244961318..fe416b8c5 100644 --- a/ee/backend/pkg/spot/auth/authorizer.go +++ b/ee/backend/pkg/server/auth/authorizer.go @@ -1,8 +1,12 @@ package auth -import "fmt" +import ( + "fmt" -func (a *authImpl) IsAuthorized(authHeader string, permissions []string, isExtension bool) (*User, error) { + "openreplay/backend/pkg/server/user" +) + +func (a *authImpl) IsAuthorized(authHeader string, permissions []string, isExtension bool) (*user.User, error) { secret := a.secret if isExtension { secret = a.spotSecret diff --git a/ee/backend/pkg/spot/api/permissions.go b/ee/backend/pkg/server/auth/permissions.go similarity index 93% rename from ee/backend/pkg/spot/api/permissions.go rename to ee/backend/pkg/server/auth/permissions.go index 1da671bf5..776be57d3 100644 --- a/ee/backend/pkg/spot/api/permissions.go +++ b/ee/backend/pkg/server/auth/permissions.go @@ -1,4 +1,4 @@ -package api +package auth import "strings" diff --git a/ee/backend/pkg/spot/auth/storage.go 
b/ee/backend/pkg/server/auth/storage.go similarity index 87% rename from ee/backend/pkg/spot/auth/storage.go rename to ee/backend/pkg/server/auth/storage.go index 25d623c34..531321eef 100644 --- a/ee/backend/pkg/spot/auth/storage.go +++ b/ee/backend/pkg/server/auth/storage.go @@ -2,11 +2,13 @@ package auth import ( "fmt" - "openreplay/backend/pkg/db/postgres/pool" "strings" + + "openreplay/backend/pkg/db/postgres/pool" + "openreplay/backend/pkg/server/user" ) -func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*User, error) { +func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*user.User, error) { sql := `SELECT user_id, users.tenant_id, users.name, email, EXTRACT(epoch FROM spot_jwt_iat)::BIGINT AS spot_jwt_iat, roles.permissions FROM users JOIN tenants on users.tenant_id = tenants.tenant_id @@ -15,7 +17,7 @@ func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (* if !isExtension { sql = strings.ReplaceAll(sql, "spot_jwt_iat", "jwt_iat") } - user := &User{} + user := &user.User{} var permissions []string if err := conn.QueryRow(sql, userID, tenantID). Scan(&user.ID, &user.TenantID, &user.Name, &user.Email, &user.JwtIat, &permissions); err != nil { @@ -33,3 +35,10 @@ func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (* } return user, nil } + +func abs(x int) int { + if x < 0 { + return -x + } + return x +} diff --git a/ee/backend/pkg/spot/service/user.go b/ee/backend/pkg/server/keys/user.go similarity index 88% rename from ee/backend/pkg/spot/service/user.go rename to ee/backend/pkg/server/keys/user.go index ec9e2bb69..b2857d3e7 100644 --- a/ee/backend/pkg/spot/service/user.go +++ b/ee/backend/pkg/server/keys/user.go @@ -1,3 +1,3 @@ -package service +package keys var getUserSQL = `SELECT tenant_id, name, email FROM public.users WHERE user_id = $1 AND deleted_at IS NULL LIMIT 1` diff --git a/ee/backend/pkg/spot/api/tracer.go b/ee/backend/pkg/spot/api/tracer.go deleted file mode 100644 index 3a4fd9647..000000000 --- a/ee/backend/pkg/spot/api/tracer.go +++ /dev/null @@ -1,61 +0,0 @@ -package api - -import ( - "encoding/json" - "net/http" - - "github.com/gorilla/mux" - - "openreplay/backend/pkg/spot/auth" - "openreplay/backend/pkg/spot/service" -) - -var routeMatch = map[string]string{ - "POST" + "/v1/spots": "createSpot", - "GET" + "/v1/spots/{id}": "getSpot", - "PATCH" + "/v1/spots/{id}": "updateSpot", - "GET" + "/v1/spots": "getSpots", - "DELETE" + "/v1/spots": "deleteSpots", - "POST" + "/v1/spots/{id}/comment": "addComment", - "GET" + "/v1/spots/{id}/video": "getSpotVideo", - "PATCH" + "/v1/spots/{id}/public-key": "updatePublicKey", -} - -func (e *Router) logRequest(r *http.Request, bodyBytes []byte, statusCode int) { - pathTemplate, err := mux.CurrentRoute(r).GetPathTemplate() - if err != nil { - e.log.Error(r.Context(), "failed to get path template: %s", err) - } - e.log.Info(r.Context(), "path template: %s", pathTemplate) - if _, ok := routeMatch[r.Method+pathTemplate]; !ok { - e.log.Debug(r.Context(), "no match for route: %s %s", r.Method, pathTemplate) - return - } - // Convert the parameters to json - query := r.URL.Query() - params := make(map[string]interface{}) - for key, values := range query { - if len(values) > 1 { - params[key] = values - } else { - params[key] = values[0] - } - } - jsonData, err := json.Marshal(params) - if err != nil { - e.log.Error(r.Context(), "failed to marshal query parameters: %s", err) - } - requestData := &service.RequestData{ - Action: 
routeMatch[r.Method+pathTemplate], - Method: r.Method, - PathFormat: pathTemplate, - Endpoint: r.URL.Path, - Payload: bodyBytes, - Parameters: jsonData, - Status: statusCode, - } - userData := r.Context().Value("userData").(*auth.User) - e.services.Tracer.Trace(userData, requestData) - // DEBUG - e.log.Info(r.Context(), "request data: %v", requestData) -} diff --git a/ee/backend/pkg/spot/builder.go b/ee/backend/pkg/spot/builder.go deleted file mode 100644 index b1827897d..000000000 --- a/ee/backend/pkg/spot/builder.go +++ /dev/null @@ -1,45 +0,0 @@ -package spot - -import ( - "openreplay/backend/internal/config/spot" - "openreplay/backend/pkg/db/postgres/pool" - "openreplay/backend/pkg/flakeid" - "openreplay/backend/pkg/logger" - "openreplay/backend/pkg/objectstorage" - "openreplay/backend/pkg/objectstorage/store" - "openreplay/backend/pkg/spot/auth" - "openreplay/backend/pkg/spot/service" - "openreplay/backend/pkg/spot/transcoder" -) - -type ServicesBuilder struct { - Flaker *flakeid.Flaker - ObjStorage objectstorage.ObjectStorage - Auth auth.Auth - Spots service.Spots - Keys service.Keys - Transcoder transcoder.Transcoder - Tracer service.Tracer -} - -func NewServiceBuilder(log logger.Logger, cfg *spot.Config, pgconn pool.Pool) (*ServicesBuilder, error) { - objStore, err := store.NewStore(&cfg.ObjectsConfig) - if err != nil { - return nil, err - } - flaker := flakeid.NewFlaker(cfg.WorkerID) - tracer, err := service.NewTracer(log, pgconn) - if err != nil { - return nil, err - } - spots := service.NewSpots(log, pgconn, flaker) - return &ServicesBuilder{ - Flaker: flaker, - ObjStorage: objStore, - Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn), - Spots: spots, - Keys: service.NewKeys(log, pgconn), - Transcoder: transcoder.NewTranscoder(cfg, log, objStore, pgconn, spots), - Tracer: tracer, - }, nil -} diff --git a/ee/backend/pkg/spot/service/tracer.go b/ee/backend/pkg/spot/service/tracer.go deleted file mode 100644 index 8c3342470..000000000 --- a/ee/backend/pkg/spot/service/tracer.go +++ /dev/null @@ -1,104 +0,0 @@ -package service - -import ( - "context" - "errors" - "openreplay/backend/pkg/db/postgres" - db "openreplay/backend/pkg/db/postgres/pool" - "openreplay/backend/pkg/logger" - "openreplay/backend/pkg/pool" - "openreplay/backend/pkg/spot/auth" -) - -type Tracer interface { - Trace(user *auth.User, data *RequestData) error - Close() error -} - -type tracerImpl struct { - log logger.Logger - conn db.Pool - traces postgres.Bulk - saver pool.WorkerPool -} - -func NewTracer(log logger.Logger, conn db.Pool) (Tracer, error) { - switch { - case log == nil: - return nil, errors.New("logger is required") - case conn == nil: - return nil, errors.New("connection is required") - } - tracer := &tracerImpl{ - log: log, - conn: conn, - } - if err := tracer.initBulk(); err != nil { - return nil, err - } - tracer.saver = pool.NewPool(1, 200, tracer.sendTraces) - return tracer, nil -} - -func (t *tracerImpl) initBulk() (err error) { - t.traces, err = postgres.NewBulk(t.conn, - "traces", - "(user_id, tenant_id, auth, action, method, path_format, endpoint, payload, parameters, status)", - "($%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d)", - 10, 50) - if err != nil { - return err - } - return nil -} - -type Task struct { - UserID *uint64 - TenantID uint64 - Auth *string - Data *RequestData -} - -func (t *tracerImpl) sendTraces(payload interface{}) { - rec := payload.(*Task) - t.log.Info(context.Background(), "Sending traces, %v", rec) - if err := t.traces.Append(rec.UserID, 
rec.TenantID, rec.Auth, rec.Data.Action, rec.Data.Method, rec.Data.PathFormat, - rec.Data.Endpoint, rec.Data.Payload, rec.Data.Parameters, rec.Data.Status); err != nil { - t.log.Error(context.Background(), "can't append trace: %s", err) - } -} - -type RequestData struct { - Action string - Method string - PathFormat string - Endpoint string - Payload []byte - Parameters []byte - Status int -} - -func (t *tracerImpl) Trace(user *auth.User, data *RequestData) error { - switch { - case user == nil: - return errors.New("user is required") - case data == nil: - return errors.New("request is required") - } - trace := &Task{ - UserID: &user.ID, - TenantID: user.TenantID, - Auth: &user.AuthMethod, - Data: data, - } - t.saver.Submit(trace) - return nil -} - -func (t *tracerImpl) Close() error { - t.saver.Stop() - if err := t.traces.Send(); err != nil { - return err - } - return nil -} diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.22.0/1.22.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.22.0/1.22.0.sql new file mode 100644 index 000000000..caac93e06 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.22.0/1.22.0.sql @@ -0,0 +1 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee'; diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 6cb9078ef..1b7ca4dbb 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -1,4 +1,4 @@ -CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.21.0-ee'; +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee'; CREATE DATABASE IF NOT EXISTS experimental; CREATE TABLE IF NOT EXISTS experimental.autocomplete diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.22.0/1.22.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.22.0/1.22.0.sql new file mode 100644 index 000000000..28dd9ed7f --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.22.0/1.22.0.sql @@ -0,0 +1,32 @@ +\set previous_version 'v1.21.0-ee' +\set next_version 'v1.22.0-ee' +SELECT openreplay_version() AS current_version, + openreplay_version() = :'previous_version' AS valid_previous, + openreplay_version() = :'next_version' AS is_next +\gset + +\if :valid_previous +\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version' +BEGIN; +SELECT format($fn_def$ +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT '%1$s' +$$ LANGUAGE sql IMMUTABLE; +$fn_def$, :'next_version') +\gexec + +-- + +DELETE +FROM public.metrics +WHERE metrics.metric_type = 'insights'; + +COMMIT; + +\elif :is_next +\echo new version detected :'next_version', nothing to do +\else +\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version' +\endif diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 22c7ecbad..01c31b0a9 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -1,4 +1,4 @@ -\set or_version 'v1.21.0-ee' +\set or_version 'v1.22.0-ee' SET client_min_messages TO NOTICE; \set ON_ERROR_STOP true SELECT EXISTS (SELECT 1 diff --git a/ee/scripts/schema/db/rollback_dbs/clickhouse/1.22.0/1.22.0.sql b/ee/scripts/schema/db/rollback_dbs/clickhouse/1.22.0/1.22.0.sql new file mode 100644 index 000000000..c60511f00 
--- /dev/null +++ b/ee/scripts/schema/db/rollback_dbs/clickhouse/1.22.0/1.22.0.sql @@ -0,0 +1 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.21.0-ee'; diff --git a/ee/scripts/schema/db/rollback_dbs/postgresql/1.22.0/1.22.0.sql b/ee/scripts/schema/db/rollback_dbs/postgresql/1.22.0/1.22.0.sql new file mode 100644 index 000000000..c06fb99c5 --- /dev/null +++ b/ee/scripts/schema/db/rollback_dbs/postgresql/1.22.0/1.22.0.sql @@ -0,0 +1,26 @@ +\set previous_version 'v1.22.0-ee' +\set next_version 'v1.21.0-ee' +SELECT openreplay_version() AS current_version, + openreplay_version() = :'previous_version' AS valid_previous, + openreplay_version() = :'next_version' AS is_next +\gset + +\if :valid_previous +\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version' +BEGIN; +SELECT format($fn_def$ +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT '%1$s' +$$ LANGUAGE sql IMMUTABLE; +$fn_def$, :'next_version') +\gexec + +COMMIT; + +\elif :is_next +\echo new version detected :'next_version', nothing to do +\else +\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version' +\endif \ No newline at end of file diff --git a/frontend/app/api_client.ts b/frontend/app/api_client.ts index 431cea014..a8ac9b3b2 100644 --- a/frontend/app/api_client.ts +++ b/frontend/app/api_client.ts @@ -198,6 +198,11 @@ export default class APIClient { } return fetch(edp + _path, init).then((response) => { + if (response.status === 403) { + console.warn('API returned 403. Clearing JWT token.'); + this.onUpdateJwt({ jwt: undefined }); // Clear the token + } + if (response.ok) { return response; } else { diff --git a/frontend/app/components/Client/Integrations/apiMethods.ts b/frontend/app/components/Client/Integrations/apiMethods.ts index e5115bd13..3924a068b 100644 --- a/frontend/app/components/Client/Integrations/apiMethods.ts +++ b/frontend/app/components/Client/Integrations/apiMethods.ts @@ -35,6 +35,13 @@ export function useIntegration( return initialValues; }, initialData: initialValues, + retry: (failureCount, error) => { + const status = error.status || error.response.status + if (status === 404) { + return false; + } + return failureCount < 4; + } }); const saveMutation = useMutation({ diff --git a/frontend/app/components/Client/Users/components/UserForm/UserForm.tsx b/frontend/app/components/Client/Users/components/UserForm/UserForm.tsx index 9b5b9bd03..2c1ee6e2c 100644 --- a/frontend/app/components/Client/Users/components/UserForm/UserForm.tsx +++ b/frontend/app/components/Client/Users/components/UserForm/UserForm.tsx @@ -18,7 +18,7 @@ function UserForm() { const isSaving = userStore.saving; const user: any = userStore.instance || userStore.initUser(); const roles = roleStore.list - .filter((r) => (r.isProtected ? user.isSuperAdmin : true)) + .filter((r) => (r.protected ? 
user.isSuperAdmin : true)) .map((r) => ({ label: r.name, value: r.roleId })); const onChangeCheckbox = (e: any) => { diff --git a/frontend/app/components/Dashboard/components/DashboardList/NewDashModal/Examples/Count.tsx b/frontend/app/components/Dashboard/components/DashboardList/NewDashModal/Examples/Count.tsx index 044824ab0..6dd084040 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/NewDashModal/Examples/Count.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/NewDashModal/Examples/Count.tsx @@ -9,7 +9,6 @@ import { import React from 'react'; import ExCard from './ExCard'; -import { size } from '@floating-ui/react-dom-interactions'; const TYPES = { Frustrations: 'frustrations', diff --git a/frontend/app/components/Dashboard/components/Funnels/FunnelIssuesSort/FunnelIssuesSort.tsx b/frontend/app/components/Dashboard/components/Funnels/FunnelIssuesSort/FunnelIssuesSort.tsx index 580f6a4e7..eae83c842 100644 --- a/frontend/app/components/Dashboard/components/Funnels/FunnelIssuesSort/FunnelIssuesSort.tsx +++ b/frontend/app/components/Dashboard/components/Funnels/FunnelIssuesSort/FunnelIssuesSort.tsx @@ -1,6 +1,5 @@ import { useStore } from 'App/mstore'; import React from 'react'; -// import Select from 'Shared/Select'; import { Select } from 'antd'; const sortOptions = [ diff --git a/frontend/app/components/Funnels/FunnelWidget/FunnelWidget.tsx b/frontend/app/components/Funnels/FunnelWidget/FunnelWidget.tsx index ffe3a2efc..11df5f487 100644 --- a/frontend/app/components/Funnels/FunnelWidget/FunnelWidget.tsx +++ b/frontend/app/components/Funnels/FunnelWidget/FunnelWidget.tsx @@ -1,163 +1,187 @@ import React, { useEffect } from 'react'; import Widget from 'App/mstore/types/widget'; -import Funnelbar, { UxTFunnelBar } from "./FunnelBar"; +import Funnelbar, { UxTFunnelBar } from './FunnelBar'; import cn from 'classnames'; import stl from './FunnelWidget.module.css'; import { observer } from 'mobx-react-lite'; import { NoContent, Icon } from 'UI'; import { Tag, Tooltip } from 'antd'; import { useModal } from 'App/components/Modal'; +import { useStore } from '@/mstore'; +import Filter from '@/mstore/types/filter'; interface Props { - metric?: Widget; - isWidget?: boolean; - data: any; + metric?: Widget; + isWidget?: boolean; + data: any; } + function FunnelWidget(props: Props) { - const [focusedFilter, setFocusedFilter] = React.useState(null); - const { isWidget = false, data, metric } = props; - const funnel = data.funnel || { stages: [] }; - const totalSteps = funnel.stages.length; - const stages = isWidget ? [...funnel.stages.slice(0, 1), funnel.stages[funnel.stages.length - 1]] : funnel.stages; - const hasMoreSteps = funnel.stages.length > 2; - const lastStage = funnel.stages[funnel.stages.length - 1]; - const remainingSteps = totalSteps - 2; - const { hideModal } = useModal(); - const metricLabel = metric?.metricFormat == 'userCount' ? 'Users' : 'Sessions'; + const { dashboardStore, searchStore } = useStore(); + const [focusedFilter, setFocusedFilter] = React.useState(null); + const { isWidget = false, data, metric } = props; + const funnel = data.funnel || { stages: [] }; + const totalSteps = funnel.stages.length; + const stages = isWidget ? 
[...funnel.stages.slice(0, 1), funnel.stages[funnel.stages.length - 1]] : funnel.stages; + const hasMoreSteps = funnel.stages.length > 2; + const lastStage = funnel.stages[funnel.stages.length - 1]; + const remainingSteps = totalSteps - 2; + const { hideModal } = useModal(); + const metricLabel = metric?.metricFormat == 'userCount' ? 'Users' : 'Sessions'; + const drillDownFilter = dashboardStore.drillDownFilter; + const drillDownPeriod = dashboardStore.drillDownPeriod; + const metricFilters = metric?.series[0]?.filter.filters || []; - useEffect(() => { - return () => { - if (isWidget) return; - hideModal(); + const applyDrillDown = (index: number) => { + const filter = new Filter().fromData({ filters: metricFilters.slice(0, index + 1) }); + const periodTimestamps = drillDownPeriod.toTimestamps(); + drillDownFilter.merge({ + filters: filter.toJson().filters, + startTimestamp: periodTimestamps.startTimestamp, + endTimestamp: periodTimestamps.endTimestamp + }); + }; + + useEffect(() => { + return () => { + if (isWidget) return; + hideModal(); + }; + }, []); + + const focusStage = (index: number) => { + funnel.stages.forEach((s, i) => { + // turning on all filters if one was focused already + if (focusedFilter === index) { + s.updateKey('isActive', true); + setFocusedFilter(null); + } else { + setFocusedFilter(index); + if (i === index) { + s.updateKey('isActive', true); + } else { + s.updateKey('isActive', false); } - }, []); + } + }); - const focusStage = (index: number) => { - funnel.stages.forEach((s, i) => { - // turning on all filters if one was focused already - if (focusedFilter === index) { - s.updateKey('isActive', true) - setFocusedFilter(null) - } else { - setFocusedFilter(index) - if (i === index) { - s.updateKey('isActive', true) - } else { - s.updateKey('isActive', false) - } - } - }) - } + applyDrillDown(focusedFilter === index ? -1 : index); + }; - return ( - - - No data available for the selected period. - - } - show={!stages || stages.length === 0} - > -
- { !isWidget && ( - stages.map((filter: any, index: any) => ( - - )) - )} + return ( + + + No data available for the selected period. +
+ } + show={!stages || stages.length === 0} + > +
+ {!isWidget && ( + stages.map((filter: any, index: any) => ( + + )) + )} - { isWidget && ( - <> - + {isWidget && ( + <> + - { hasMoreSteps && ( - <> - - - )} + {hasMoreSteps && ( + <> + + + )} - {funnel.stages.length > 1 && ( - - )} - - )} -
-
-
- Lost conversion - - - {funnel.lostConversions} - - -
-
-
- Total conversion - - - {funnel.totalConversions} - - -
-
- {funnel.totalDropDueToIssues > 0 &&
{funnel.totalDropDueToIssues} sessions dropped due to issues.
} - - ); + {funnel.stages.length > 1 && ( + + )} + + )} +
+
+
+ Lost conversion + + + {funnel.lostConversions} + + +
+
+
+ Total conversion + + + {funnel.totalConversions} + + +
+
+ {funnel.totalDropDueToIssues > 0 &&
{funnel.totalDropDueToIssues} sessions dropped due to issues.
} + + ); } export const EmptyStage = observer(({ total }: any) => { - return ( -
- -
- {`+${total} ${total > 1 ? 'steps' : 'step'}`} -
-
-
- ) -}) + return ( +
+ +
+ {`+${total} ${total > 1 ? 'steps' : 'step'}`} +
+
+
+ ); +}); export const Stage = observer(({ metricLabel, stage, index, isWidget, uxt, focusStage, focusedFilter }: any) => { - return stage ? ( -
- - {!uxt ? : } - {/*{!isWidget && !uxt && }*/} -
- ) : ( - <> - ) -}) + return stage ? ( +
+ + {!uxt ? : } + {/*{!isWidget && !uxt && }*/} +
+ ) : ( + <> + ); +}); export const IndexNumber = observer(({ index }: any) => { - return ( -
- {index === 0 ? : index} -
- ); -}) + return ( +
+ {index === 0 ? : index} +
+ ); +}); const BarActions = observer(({ bar }: any) => { - return ( -
- -
- ) -}) + return ( +
+ +
+ ); +}); export default observer(FunnelWidget); diff --git a/frontend/app/components/Session/Player/MobilePlayer/MobileControls.tsx b/frontend/app/components/Session/Player/MobilePlayer/MobileControls.tsx index 78f9d47d4..275f3e6e2 100644 --- a/frontend/app/components/Session/Player/MobilePlayer/MobileControls.tsx +++ b/frontend/app/components/Session/Player/MobilePlayer/MobileControls.tsx @@ -28,6 +28,7 @@ import { import { useStore } from 'App/mstore'; import { session as sessionRoute, withSiteId } from 'App/routes'; import { SummaryButton } from 'Components/Session_/Player/Controls/Controls'; +import { MobEventsList, WebEventsList } from "../../../Session_/Player/Controls/EventsList"; import useShortcuts from '../ReplayPlayer/useShortcuts'; export const SKIP_INTERVALS = { diff --git a/frontend/app/components/Session/Player/MobilePlayer/PlayerInst.tsx b/frontend/app/components/Session/Player/MobilePlayer/PlayerInst.tsx index c696a8f32..b68a6199b 100644 --- a/frontend/app/components/Session/Player/MobilePlayer/PlayerInst.tsx +++ b/frontend/app/components/Session/Player/MobilePlayer/PlayerInst.tsx @@ -72,7 +72,7 @@ function Player(props: IProps) { React.useEffect(() => { playerContext.player.scale(); - }, [props.bottomBlock, props.fullscreen, playerContext.player, activeTab, fullView]); + }, [bottomBlock, props.fullscreen, playerContext.player, activeTab, fullView]); React.useEffect(() => { playerContext.player.addFullscreenBoundary(props.fullscreen || fullView); diff --git a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx index d7f3a861c..03ccfcfda 100644 --- a/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx +++ b/frontend/app/components/Session/Player/ReplayPlayer/PlayerBlockHeader.tsx @@ -105,7 +105,7 @@ function PlayerBlockHeader(props: any) { )}
-
+
-
-
Traces
- {tabs.length && tab ? ( -
- -
- ) : null} +
+
+
Traces
+ {tabs.length && tab ? ( +
+ +
+ ) : null} +
-
- +
+ + Current Tab + + ), + value: 'current', + disabled: true, + }, + ]} + defaultValue="all" + size="small" + className="rounded-full font-medium" + /> + + } + /> +
- {isPending ? ( - - ) : null} + {isPending ? : null} {isError ? ( - - ) : null} - {isSuccess ? ( - + ) : null} + {isSuccess ? : null} ); @@ -128,8 +146,10 @@ const LogsTable = observer(({ data }: { data: UnifiedLog[] }) => { const _list = React.useRef(null); const activeIndex = React.useMemo(() => { const currTs = time + sessionStart; - const index = data.findIndex( - (log) => log.timestamp !== 'N/A' ? new Date(log.timestamp).getTime() >= currTs : false + const index = data.findIndex((log) => + log.timestamp !== 'N/A' + ? new Date(log.timestamp).getTime() >= currTs + : false ); return index === -1 ? data.length - 1 : index; }, [time, data.length]); @@ -141,17 +161,22 @@ const LogsTable = observer(({ data }: { data: UnifiedLog[] }) => { const onJump = (ts: number) => { player.jump(ts - sessionStart); - } + }; return ( <> {data.map((log, index) => ( - + ))} - ) + ); }); export default observer(BackendLogsPanel); diff --git a/frontend/app/components/Session/Player/SharedComponents/BackendLogs/StatusMessages.tsx b/frontend/app/components/Session/Player/SharedComponents/BackendLogs/StatusMessages.tsx index ae44651d2..1f22dc589 100644 --- a/frontend/app/components/Session/Player/SharedComponents/BackendLogs/StatusMessages.tsx +++ b/frontend/app/components/Session/Player/SharedComponents/BackendLogs/StatusMessages.tsx @@ -12,7 +12,7 @@ export function LoadingFetch({ provider }: { provider: string }) { 'w-full h-full flex items-center justify-center flex-col gap-2' } > - +
Fetching logs from {provider}...
); @@ -33,16 +33,23 @@ export function FailedFetch({ 'w-full h-full flex flex-col items-center justify-center gap-2' } > - -
+ +
+ Failed to fetch logs from {provider}. -
+
+ +
+ +
-
-
history.push(intPath)}> + + +
+
); } diff --git a/frontend/app/components/Session/Player/TagWatch/TagWatch.tsx b/frontend/app/components/Session/Player/TagWatch/TagWatch.tsx index 5cca54ee5..75781a672 100644 --- a/frontend/app/components/Session/Player/TagWatch/TagWatch.tsx +++ b/frontend/app/components/Session/Player/TagWatch/TagWatch.tsx @@ -2,14 +2,51 @@ import { useStore } from 'App/mstore'; import SaveModal from 'Components/Session/Player/TagWatch/SaveModal'; import React from 'react'; import { PlayerContext } from 'Components/Session/playerContext'; -import { Button, Input } from 'antd'; -import { CopyButton } from 'UI'; -import { SearchOutlined, ZoomInOutlined } from '@ant-design/icons'; +import { Button, Input, Tooltip } from 'antd'; +import { CopyOutlined } from '@ant-design/icons'; +import { ZoomInOutlined } from '@ant-design/icons'; import { observer } from 'mobx-react-lite'; import { useModal } from 'App/components/Modal'; import { toast } from 'react-toastify'; -import { FilterKey } from "App/types/filter/filterType"; -import { addOptionsToFilter } from "App/types/filter/newFilter"; +import { FilterKey } from 'App/types/filter/filterType'; +import { addOptionsToFilter } from 'App/types/filter/newFilter'; + +interface CopyableTextAreaProps { + selector: string; + setSelector: (value: string) => void; +} + +const CopyableTextArea: React.FC = ({ selector, setSelector }) => { + const handleCopy = () => { + navigator.clipboard.writeText(selector); + }; + + return ( +
+ setSelector(e.target.value)} + className="rounded-lg font-mono text-sm placeholder:font-sans placeholder:text-base placeholder:text-gray-400" + rows={4} + style={{ paddingRight: '40px' }} + placeholder='Enter selector to tag elements. E.g. .btn-primary' + /> + +
+ ); +}; function TagWatch() { const { tagWatchStore, searchStore } = useStore(); @@ -50,7 +87,7 @@ function TagWatch() { ignoreClickRage: ignoreClRage, ignoreDeadClick: ignoreDeadCl, }); - const tags = await tagWatchStore.getTags() + const tags = await tagWatchStore.getTags(); if (tags) { addOptionsToFilter( FilterKey.TAGGED_ELEMENT, @@ -58,42 +95,41 @@ function TagWatch() { ); searchStore.refreshFilterOptions(); } - // @ts-ignore toast.success('Tag created'); setSelector(''); - return tag + return tag; } catch { - // @ts-ignore toast.error('Failed to create tag'); } }; + const openSaveModal = () => { if (selector === '') { return; } showModal(, { right: true, width: 400 }); }; + return ( -
-
-
Element Selector
- +
+
+

            Select elements in the session play area to tag them by class selector, then filter sessions to verify whether they rendered.

+
- setSelector(e.target.value)} /> + + + -
- Create and filter sessions by ‘watch elements’ to determine if they rendered or not. -
); } -export default observer(TagWatch); +export default observer(TagWatch); \ No newline at end of file diff --git a/frontend/app/components/Session/RightBlock.tsx b/frontend/app/components/Session/RightBlock.tsx index a09003d3c..e0e63fddf 100644 --- a/frontend/app/components/Session/RightBlock.tsx +++ b/frontend/app/components/Session/RightBlock.tsx @@ -18,7 +18,7 @@ function RightBlock({ switch (activeTab) { case 'EVENTS': return ( -
+
); diff --git a/frontend/app/components/Session/Tabs/Tabs.tsx b/frontend/app/components/Session/Tabs/Tabs.tsx index bbf3d5b34..1bd88d627 100644 --- a/frontend/app/components/Session/Tabs/Tabs.tsx +++ b/frontend/app/components/Session/Tabs/Tabs.tsx @@ -22,6 +22,7 @@ const Tabs = ({ tabs, active, onClick, border = true, className }: Props) => { return (
), value: key, - disabled: disabled, + disabled: disabled, }))} />
diff --git a/frontend/app/components/Session_/EventsBlock/Event.tsx b/frontend/app/components/Session_/EventsBlock/Event.tsx index 8d212e578..227a97369 100644 --- a/frontend/app/components/Session_/EventsBlock/Event.tsx +++ b/frontend/app/components/Session_/EventsBlock/Event.tsx @@ -159,7 +159,7 @@ const Event: React.FC = ({ >
{event.type && iconName ? ( @@ -169,20 +169,18 @@ const Event: React.FC = ({ )}
-
+
- + {title} {body && !isLocation && ( )} @@ -202,8 +200,7 @@ const Event: React.FC = ({ {isLocation && (
diff --git a/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js b/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js index ff91d7198..9ae5f7808 100644 --- a/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js +++ b/frontend/app/components/Session_/EventsBlock/EventGroupWrapper.js @@ -8,6 +8,7 @@ import { Icon, TextEllipsis } from 'UI'; import Event from './Event'; import NoteEvent from './NoteEvent'; import stl from './eventGroupWrapper.module.css'; +import cn from 'classnames' function EventGroupWrapper(props) { const { userStore } = useStore(); @@ -132,7 +133,7 @@ function EventGroupWrapper(props) { {isFirst && isLocation && event.referrer && (
- Referrer: {safeRef} + Referrer: {safeRef}
)} diff --git a/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js b/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js index 4f7061501..26841c990 100644 --- a/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js +++ b/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js @@ -1,11 +1,12 @@ import React from 'react'; -import { Input, Button } from 'UI'; +import {Input, Button, Tooltip} from 'antd'; +import {CloseOutlined, SearchOutlined} from '@ant-design/icons'; import { PlayerContext } from 'App/components/Session/playerContext'; function EventSearch(props) { const { player } = React.useContext(PlayerContext); - const { onChange, value, header, setActiveTab } = props; + const { onChange, value, header, setActiveTab, eventsText } = props; const toggleEvents = () => player.toggleEvents(); @@ -16,25 +17,25 @@ function EventSearch(props) { } /> -
diff --git a/frontend/app/components/Session_/EventsBlock/EventsBlock.tsx b/frontend/app/components/Session_/EventsBlock/EventsBlock.tsx index e322e48fb..1b555bb6c 100644 --- a/frontend/app/components/Session_/EventsBlock/EventsBlock.tsx +++ b/frontend/app/components/Session_/EventsBlock/EventsBlock.tsx @@ -196,7 +196,7 @@ function EventsBlock(props: IProps) { return ( <> -
+
{uxtestingStore.isUxt() ? (
) : null} -
+
-
{eventsText}
{ + if (showSingleTab) { + const stackEventList = tabStates[currentTab].stackList; + const frustrationsList = tabStates[currentTab].frustrationsList; + const exceptionsList = tabStates[currentTab].exceptionsList; + const resourceListUnmap = tabStates[currentTab].resourceList; + const fetchList = tabStates[currentTab].fetchList; + const graphqlList = tabStates[currentTab].graphqlList; + const performanceChartData = + tabStates[currentTab].performanceChartData; + + return { + stackEventList, + frustrationsList, + exceptionsList, + resourceListUnmap, + fetchList, + graphqlList, + performanceChartData, + } + } else { + const stackEventList = tabValues.flatMap((tab) => tab.stackList); + // these two are global + const frustrationsList = tabValues[0].frustrationsList; + const exceptionsList = tabValues[0].exceptionsList; + // we can't compute global chart data because some tabs coexist + const performanceChartData: any = []; + const resourceListUnmap = tabValues.flatMap((tab) => tab.resourceList); + const fetchList = tabValues.flatMap((tab) => tab.fetchList); + const graphqlList = tabValues.flatMap((tab) => tab.graphqlList); + + return { + stackEventList, + frustrationsList, + exceptionsList, + resourceListUnmap, + fetchList, + graphqlList, + performanceChartData, + } + } + }, [tabStates, currentTab, dataSource, tabValues]); const fetchPresented = fetchList.length > 0; const resourceList = resourceListUnmap @@ -168,7 +216,18 @@ function WebOverviewPanelCont() { PERFORMANCE: checkInZoomRange(performanceChartData), FRUSTRATIONS: checkInZoomRange(frustrationsList), }; - }, [tabStates, currentTab, zoomEnabled, zoomStartTs, zoomEndTs]); + }, [ + tabStates, + currentTab, + zoomEnabled, + zoomStartTs, + zoomEndTs, + resourceList.length, + exceptionsList.length, + stackEventList.length, + performanceChartData.length, + frustrationsList.length, + ]); const originStr = window.env.ORIGIN || window.location.origin; const isSaas = /app\.openreplay\.com/.test(originStr); @@ -187,6 +246,7 @@ function WebOverviewPanelCont() { sessionId={sessionId} setZoomTab={setZoomTab} zoomTab={zoomTab} + showSingleTab={showSingleTab} /> ); } @@ -238,6 +298,7 @@ function PanelComponent({ spotTime, spotEndTime, onClose, + showSingleTab, }: any) { return ( @@ -280,12 +341,13 @@ function PanelComponent({ ) : null}
{isSpot ? null : ( -
- +
+ {!isMobile ? : null} +
)} @@ -302,12 +364,19 @@ function PanelComponent({ style={{ height: '60px', minHeight: 'unset', padding: 0 }} title={
- + Select a debug option to visualize on the timeline.
} > - {isSpot ? : } + {isSpot ? ( + + ) : ( + + )} {selectedFeatures.map((feature: any, index: number) => (
( { - const { title, className, list = [], endTime = 0, isGraph = false, message = '' } = props; + const { title, className, list = [], endTime = 0, isGraph = false, message = '', disabled } = props; const scale = 100 / endTime; const _list = isGraph ? [] : @@ -82,7 +85,7 @@ const EventRow = React.memo((props: Props) => { } return groupedItems; - }, [list]); + }, [list.length]); return (
{ >
{title}
- {message ? : null} + + + +
{isGraph ? ( - + ) : _list.length > 0 ? ( _list.map((item: { items: any[], left: number, isGrouped: boolean }, index: number) => { const left = item.left @@ -123,7 +129,7 @@ const EventRow = React.memo((props: Props) => { ); }) ) : ( -
+
None captured.
)} @@ -133,11 +139,3 @@ const EventRow = React.memo((props: Props) => { }); export default EventRow; - -function RowInfo({ message }: any) { - return ( - - - - ); -} diff --git a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx index f36bccbc8..019a1e3fc 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx @@ -1,6 +1,8 @@ import React from 'react'; -import { Popover, Checkbox } from 'antd'; +import { Popover, Checkbox, Button } from 'antd'; +import {EyeInvisibleOutlined} from '@ant-design/icons'; import { Icon } from 'UI' +import Funnel from '@/types/funnel'; const NETWORK = 'NETWORK'; const ERRORS = 'ERRORS'; @@ -59,7 +61,7 @@ function FeatureSelection(props: Props) { +
toggleAllFeatures()} @@ -81,10 +83,9 @@ function FeatureSelection(props: Props) {
} > -
- -
X-Ray Events
-
+ ); diff --git a/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx b/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx index 2e719f377..51a5a0898 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx @@ -1,82 +1,107 @@ import React from 'react'; import { AreaChart, Area, ResponsiveContainer } from 'recharts'; +import {InfoCircleOutlined} from '@ant-design/icons' interface Props { - list: any; + list: any; + disabled?: boolean; } const PerformanceGraph = React.memo((props: Props) => { - const { list } = props; + const { list, disabled } = props; - const finalValues = React.useMemo(() => { - const cpuMax = list.reduce((acc: number, item: any) => { - return Math.max(acc, item.cpu); - }, 0); - const cpuMin = list.reduce((acc: number, item: any) => { - return Math.min(acc, item.cpu); - }, Infinity); + const finalValues = React.useMemo(() => { + const cpuMax = list.reduce((acc: number, item: any) => { + return Math.max(acc, item.cpu); + }, 0); + const cpuMin = list.reduce((acc: number, item: any) => { + return Math.min(acc, item.cpu); + }, Infinity); - const memoryMin = list.reduce((acc: number, item: any) => { - return Math.min(acc, item.usedHeap); - }, Infinity); - const memoryMax = list.reduce((acc: number, item: any) => { - return Math.max(acc, item.usedHeap); - }, 0); + const memoryMin = list.reduce((acc: number, item: any) => { + return Math.min(acc, item.usedHeap); + }, Infinity); + const memoryMax = list.reduce((acc: number, item: any) => { + return Math.max(acc, item.usedHeap); + }, 0); - const convertToPercentage = (val: number, max: number, min: number) => { - return ((val - min) / (max - min)) * 100; - }; - const cpuValues = list.map((item: any) => convertToPercentage(item.cpu, cpuMax, cpuMin)); - const memoryValues = list.map((item: any) => convertToPercentage(item.usedHeap, memoryMax, memoryMin)); - const mergeArraysWithMaxNumber = (arr1: any[], arr2: any[]) => { - const maxLength = Math.max(arr1.length, arr2.length); - const result = []; - for (let i = 0; i < maxLength; i++) { - const num = Math.round(Math.max(arr1[i] || 0, arr2[i] || 0)); - result.push(num > 60 ? num : 1); - } - return result; - }; - const finalValues = mergeArraysWithMaxNumber(cpuValues, memoryValues); - return finalValues; - }, []); - - const data = list.map((item: any, index: number) => { - return { - time: item.time, - cpu: finalValues[index], - }; - }); - - return ( - - - - - - - - - {/* */} - - - + const convertToPercentage = (val: number, max: number, min: number) => { + return ((val - min) / (max - min)) * 100; + }; + const cpuValues = list.map((item: any) => + convertToPercentage(item.cpu, cpuMax, cpuMin) ); + const memoryValues = list.map((item: any) => + convertToPercentage(item.usedHeap, memoryMax, memoryMin) + ); + const mergeArraysWithMaxNumber = (arr1: any[], arr2: any[]) => { + const maxLength = Math.max(arr1.length, arr2.length); + const result = []; + for (let i = 0; i < maxLength; i++) { + const num = Math.round(Math.max(arr1[i] || 0, arr2[i] || 0)); + result.push(num > 60 ? 
num : 1); + } + return result; + }; + const finalValues = mergeArraysWithMaxNumber(cpuValues, memoryValues); + return finalValues; + }, [list.length]); + + const data = list.map((item: any, index: number) => { + return { + time: item.time, + cpu: finalValues[index], + }; + }); + + return ( +
+ {disabled ? ( +
+
+ Multi-tab performance overview is not available. +
+
+ ) : null} + + + + + + + + + {/* */} + + + +
+ ); }); export default PerformanceGraph; diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/Dots.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/Dots.tsx index a2c38b447..25606086d 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/Dots.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/Dots.tsx @@ -71,7 +71,7 @@ export function FrustrationElement({ item, createEventClickHandler }: CommonProp const elData = getFrustration(item); return ( {elData.name} diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx index cc7f13f63..4518ddf1c 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx @@ -168,7 +168,7 @@ function GroupedIssue({
{items.length} diff --git a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx index b1c00b62b..644c5dbbb 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx @@ -1,12 +1,14 @@ import React, { useEffect, useState } from 'react'; -import { Loader, Icon } from 'UI'; +import { Loader } from 'UI'; +import {Button, Tooltip} from 'antd'; +import {CloseOutlined} from '@ant-design/icons'; import { observer } from 'mobx-react-lite'; import { useStore } from 'App/mstore'; import SelectorsList from './components/SelectorsList/SelectorsList'; import { PlayerContext } from 'App/components/Session/playerContext'; import { compareJsonObjects } from 'App/utils'; -import Select from 'Shared/Select'; +import {Select, Form} from 'antd'; const JUMP_OFFSET = 1000; interface Props { @@ -58,34 +60,29 @@ function PageInsightsPanel({ setActiveTab }: Props) { }; return ( -
-
-
- Clicks -
-
{ - setActiveTab(''); - }} - className="ml-auto flex items-center justify-center bg-white cursor-pointer" - > - -
-
-
-
In Page
- + + +
diff --git a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.module.css b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.module.css index 723c2fd1f..f7b7ee406 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.module.css +++ b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.module.css @@ -1,7 +1,5 @@ .wrapper { - padding: 10px; - box-shadow: 1px 1px 1px rgba(0, 0, 0, 0.2); - border-radius: 3px; + padding: 1rem; background-color: $gray-lightest; margin-bottom: 15px; @@ -18,8 +16,6 @@ border-radius: 10px; background-color: $tealx; flex-shrink: 0; - border: solid thin white; - box-shadow: 1px 1px 1px rgba(0, 0, 0, 0.1); display: flex; align-items: center; justify-content: center; diff --git a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx index 5d6e5da2a..72eeca631 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx @@ -17,20 +17,20 @@ export default function SelectorCard({ index = 1, target, showContent }: Props) return ( // @ts-ignore TODO for Alex -
activeTarget(index)}> +
activeTarget(index)}>
{/* @ts-ignore */}
{index + 1}
-
{target.selector}
+
{target.selector}
{showContent && (
- {target.count} Clicks - {target.percent}% + {target.count} Click{target.count > 1 ? 's' : ''} - {target.percent}%
-
TOTAL CLICKS
+
TOTAL CLICKS
)}
diff --git a/frontend/app/components/Session_/Performance/Performance.tsx b/frontend/app/components/Session_/Performance/Performance.tsx index c0cdceb39..c80a26b6b 100644 --- a/frontend/app/components/Session_/Performance/Performance.tsx +++ b/frontend/app/components/Session_/Performance/Performance.tsx @@ -17,12 +17,14 @@ import { } from 'recharts'; import { durationFromMsFormatted } from 'App/date'; import { formatBytes } from 'App/utils'; +import {Tooltip as TooltipANT} from 'antd'; import stl from './performance.module.css'; import BottomBlock from '../BottomBlock'; import InfoLine from '../BottomBlock/InfoLine'; import { useStore } from 'App/mstore' +import { Segmented } from 'antd' const CPU_VISUAL_OFFSET = 10; @@ -457,15 +459,33 @@ function Performance() { return ( -
-
Performance
- - - +
+
+
Performance
+ + + +
+ +
+ + All Tabs + + ), value: 'all', disabled: true, }, + { label: 'Current Tab', value: 'current' }, + ]} + defaultValue="current" + size="small" + className="rounded-full font-medium" + /> +
diff --git a/frontend/app/components/Session_/Player/Controls/Controls.tsx b/frontend/app/components/Session_/Player/Controls/Controls.tsx index 0bddb0a23..d2dd042c0 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.tsx +++ b/frontend/app/components/Session_/Player/Controls/Controls.tsx @@ -34,6 +34,7 @@ import { Icon } from 'UI'; import LogsButton from 'App/components/Session/Player/SharedComponents/BackendLogs/LogsButton'; import ControlButton from './ControlButton'; +import { WebEventsList } from "./EventsList"; import Timeline from './Timeline'; import PlayerControls from './components/PlayerControls'; import styles from './controls.module.css'; diff --git a/frontend/app/components/Session_/Player/Controls/EventsList.tsx b/frontend/app/components/Session_/Player/Controls/EventsList.tsx index 41010c766..e9994771a 100644 --- a/frontend/app/components/Session_/Player/Controls/EventsList.tsx +++ b/frontend/app/components/Session_/Player/Controls/EventsList.tsx @@ -4,10 +4,12 @@ import { PlayerContext, MobilePlayerContext } from 'Components/Session/playerCon import { observer } from 'mobx-react-lite'; import { getTimelinePosition } from './getTimelinePosition' -function EventsList({ scale }: { scale: number }) { +function EventsList() { const { store } = useContext(PlayerContext); - const { tabStates, eventCount } = store.get(); + const { eventCount, endTime } = store.get(); + const tabStates = store.get().tabStates; + const scale = 100 / endTime; const events = React.useMemo(() => { return Object.values(tabStates)[0]?.eventList.filter(e => e.time) || []; }, [eventCount]); @@ -34,11 +36,12 @@ function EventsList({ scale }: { scale: number }) { ); } -function MobileEventsList({ scale }: { scale: number }) { +function MobileEventsList() { const { store } = useContext(MobilePlayerContext); - const { eventList } = store.get(); + const { eventList, endTime } = store.get(); const events = eventList.filter(e => e.type !== 'SWIPE') + const scale = 100/endTime; return ( <> {events.map((e) => ( diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.tsx b/frontend/app/components/Session_/Player/Controls/Timeline.tsx index 0888df62b..d40fa112b 100644 --- a/frontend/app/components/Session_/Player/Controls/Timeline.tsx +++ b/frontend/app/components/Session_/Player/Controls/Timeline.tsx @@ -13,11 +13,7 @@ import NotesList from './NotesList'; import SkipIntervalsList from './SkipIntervalsList'; import TimelineTracker from 'Components/Session_/Player/Controls/TimelineTracker'; -interface IProps { - isMobile?: boolean; -} - -function Timeline(props: IProps) { +function Timeline({ isMobile }: { isMobile: boolean }) { const { player, store } = useContext(PlayerContext); const [wasPlaying, setWasPlaying] = useState(false); const [maxWidth, setMaxWidth] = useState(0); @@ -158,7 +154,7 @@ function Timeline(props: IProps) { {devtoolsLoading || domLoading || !ready ?
: null}
- {props.isMobile ? : } + {isMobile ? : } diff --git a/frontend/app/components/Session_/Player/Controls/components/TimelineZoomButton.tsx b/frontend/app/components/Session_/Player/Controls/components/TimelineZoomButton.tsx index 5db0182c8..c4e8763a4 100644 --- a/frontend/app/components/Session_/Player/Controls/components/TimelineZoomButton.tsx +++ b/frontend/app/components/Session_/Player/Controls/components/TimelineZoomButton.tsx @@ -29,7 +29,7 @@ function TimelineZoomButton() { }, []) return ( - diff --git a/frontend/app/components/Session_/Storage/Storage.tsx b/frontend/app/components/Session_/Storage/Storage.tsx index 2b138728e..43c5fd851 100644 --- a/frontend/app/components/Session_/Storage/Storage.tsx +++ b/frontend/app/components/Session_/Storage/Storage.tsx @@ -1,18 +1,24 @@ import React from 'react'; -import { useStore } from 'App/mstore' +import { useStore } from 'App/mstore'; import { PlayerContext } from 'App/components/Session/playerContext'; import { observer } from 'mobx-react-lite'; import { JSONTree, NoContent, Tooltip } from 'UI'; import { formatMs } from 'App/date'; -import diff from 'microdiff' -import { STORAGE_TYPES, selectStorageList, selectStorageListNow, selectStorageType } from 'Player'; +import diff from 'microdiff'; +import { + STORAGE_TYPES, + selectStorageList, + selectStorageListNow, + selectStorageType, +} from 'Player'; import Autoscroll from '../Autoscroll'; import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; import cn from 'classnames'; import stl from './storage.module.css'; -import logger from "App/logger"; -import ReduxViewer from './ReduxViewer' +import logger from 'App/logger'; +import ReduxViewer from './ReduxViewer'; +import { Segmented } from 'antd' function getActionsName(type: string) { switch (type) { @@ -31,7 +37,7 @@ const storageDecodeKeys = { [STORAGE_TYPES.ZUSTAND]: ['state', 'mutation'], [STORAGE_TYPES.MOBX]: ['payload'], [STORAGE_TYPES.NONE]: ['state, action', 'payload', 'mutation'], -} +}; function Storage() { const { uiPlayerStore } = useStore(); @@ -42,49 +48,48 @@ function Storage() { const [stateObject, setState] = React.useState({}); const { player, store } = React.useContext(PlayerContext); - const { tabStates, currentTab } = store.get() - const state = tabStates[currentTab] || {} + const { tabStates, currentTab } = store.get(); + const state = tabStates[currentTab] || {}; const listNow = selectStorageListNow(state) || []; const list = selectStorageList(state) || []; - const type = selectStorageType(state) || STORAGE_TYPES.NONE + const type = selectStorageType(state) || STORAGE_TYPES.NONE; React.useEffect(() => { let currentState; if (listNow.length === 0) { - currentState = decodeMessage(list[0]) + currentState = decodeMessage(list[0]); } else { - currentState = decodeMessage(listNow[listNow.length - 1]) + currentState = decodeMessage(listNow[listNow.length - 1]); } - const stateObj = currentState?.state || currentState?.payload?.state || {} + const stateObj = currentState?.state || currentState?.payload?.state || {}; const newState = Object.assign(stateObject, stateObj); setState(newState); - }, [listNow.length]); const decodeMessage = (msg: any) => { const decoded = {}; - const pureMSG = { ...msg } + const pureMSG = { ...msg }; const keys = storageDecodeKeys[type]; try { - keys.forEach(key => { + keys.forEach((key) => { if (pureMSG[key]) { // @ts-ignore TODO: types for decoder decoded[key] = player.decodeMessage(pureMSG[key]); } }); } catch (e) { - logger.error("Error on message decoding: ", e, 
pureMSG); + logger.error('Error on message decoding: ', e, pureMSG); return null; } return { ...pureMSG, ...decoded }; - } + }; const decodedList = React.useMemo(() => { - return listNow.map(msg => { - return decodeMessage(msg) - }) - }, [listNow.length]) + return listNow.map((msg) => { + return decodeMessage(msg); + }); + }, [listNow.length]); const focusNextButton = () => { if (lastBtnRef.current) { @@ -99,7 +104,10 @@ function Storage() { focusNextButton(); }, [listNow]); - const renderDiff = (item: Record, prevItem?: Record) => { + const renderDiff = ( + item: Record, + prevItem?: Record + ) => { if (!showDiffs) { return; } @@ -113,7 +121,10 @@ function Storage() { if (!stateDiff) { return ( -
+
No diff
); @@ -121,13 +132,15 @@ function Storage() { return (
- {stateDiff.map((d: Record, i: number) => renderDiffs(d, i))} + {stateDiff.map((d: Record, i: number) => + renderDiffs(d, i) + )}
); }; const renderDiffs = (diff: Record, i: number) => { - const path = diff.path.join('.') + const path = diff.path.join('.'); return ( @@ -145,12 +158,16 @@ function Storage() { player.jump(list[listNow.length].time); }; - const renderItem = (item: Record, i: number, prevItem?: Record) => { + const renderItem = ( + item: Record, + i: number, + prevItem?: Record + ) => { let src; let name; - const itemD = item - const prevItemD = prevItem ? prevItem : undefined + const itemD = item; + const prevItemD = prevItem ? prevItem : undefined; switch (type) { case STORAGE_TYPES.REDUX: @@ -177,7 +194,10 @@ function Storage() { return (
{src === null ? ( @@ -187,7 +207,10 @@ function Storage() { ) : ( <> {renderDiff(itemD, prevItemD)} -
+
{typeof item?.duration === 'number' && ( -
{formatMs(itemD.duration)}
+
+ {formatMs(itemD.duration)} +
)}
{i + 1 < listNow.length && ( - )} @@ -222,31 +250,36 @@ function Storage() { }; if (type === STORAGE_TYPES.REDUX) { - return + return ; } return ( {/*@ts-ignore*/} <> - {list.length > 0 && ( -
-

- {'STATE'} -

- {showDiffs ? ( -

- DIFFS -

- ) : null} -

- {getActionsName(type)} -

-

- TTE -

+
+
+

{'STATE'}

- )} + {showDiffs ? ( +

+ DIFFS +

+ ) : null} +

+ {getActionsName(type)} +

+

+ TTE +

+ +

- @@ -322,8 +358,7 @@ function Storage() { {'Empty state.'}
) : ( - + )}
@@ -342,7 +377,6 @@ function Storage() { export default observer(Storage); - /** * TODO: compute diff and only decode the required parts * WIP example @@ -384,4 +418,4 @@ export default observer(Storage); * }, [list.length]) * } * - * */ \ No newline at end of file + * */ diff --git a/frontend/app/components/Session_/WarnBadge.tsx b/frontend/app/components/Session_/WarnBadge.tsx index 704ae619e..3d6706e5f 100644 --- a/frontend/app/components/Session_/WarnBadge.tsx +++ b/frontend/app/components/Session_/WarnBadge.tsx @@ -106,11 +106,11 @@ const WarnBadge = React.memo( >
- Tracker version({version}) for this recording is{' '} + Tracker version ({version}) for this recording is{' '} {trackerVerDiff === VersionComparison.Lower ? 'lower ' : 'ahead of '} - the current({trackerVersion}) version. + the current ({trackerVersion}) version.
 Some parts of the recording might display incorrectly. diff --git a/frontend/app/components/Spots/SpotPlayer/components/Panels/SpotConsole.tsx index b5cec283c..10893b8fa 100644 --- a/frontend/app/components/Spots/SpotPlayer/components/Panels/SpotConsole.tsx +++ b/frontend/app/components/Spots/SpotPlayer/components/Panels/SpotConsole.tsx @@ -70,6 +70,7 @@ function SpotConsole({ onClose }: { onClose: () => void }) { jump={jump} iconProps={getIconProps(log.level)} renderWithNL={renderWithNL} + showSingleTab /> ))} diff --git a/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx index 427e95d8d..e46a5547b 100644 --- a/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx +++ b/frontend/app/components/Spots/SpotPlayer/components/SpotPlayerHeader.tsx @@ -143,7 +143,7 @@ function SpotPlayerHeader({ {browserVersion && ( <>
·
-
Chrome v{browserVersion}
+
Chromium v{browserVersion}
)} {resolution && ( diff --git a/frontend/app/components/shared/DevTools/BottomBlock/InfoLine.js b/frontend/app/components/shared/DevTools/BottomBlock/InfoLine.js index 3059c70d3..c4f11d36d 100644 --- a/frontend/app/components/shared/DevTools/BottomBlock/InfoLine.js +++ b/frontend/app/components/shared/DevTools/BottomBlock/InfoLine.js @@ -3,15 +3,15 @@ import cn from 'classnames'; import cls from './infoLine.module.css'; const InfoLine = ({ children }) => ( -
+
{ children }
) const Point = ({ label = '', value = '', display=true, color, dotColor }) => display - ?
+ ?
{ dotColor != null &&
} - { `${label}` } { value } + { `${label}` } { value }
: null; diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx index b96b94e07..ce6975684 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx @@ -1,7 +1,9 @@ import React, { useEffect, useRef, useState, useMemo } from 'react'; import { LogLevel, ILog } from 'Player'; import BottomBlock from '../BottomBlock'; -import { Tabs, Input, Icon, NoContent } from 'UI'; +import { Tabs, Icon, NoContent } from 'UI'; +import {Input} from 'antd'; +import {SearchOutlined, InfoCircleOutlined} from '@ant-design/icons'; import cn from 'classnames'; import ConsoleRow from '../ConsoleRow'; import { PlayerContext } from 'App/components/Session/playerContext'; @@ -9,6 +11,7 @@ import { observer } from 'mobx-react-lite'; import { useStore } from 'App/mstore'; import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal'; import { useModal } from 'App/components/Modal'; +import TabSelector from "../TabSelector"; import useAutoscroll, { getLastItemTime } from '../useAutoscroll'; import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter'; import { VList, VListHandle } from "virtua"; @@ -93,6 +96,7 @@ function ConsolePanel({ sessionStore: { devTools }, uiPlayerStore, } = useStore(); + const zoomEnabled = uiPlayerStore.timelineZoom.enabled; const zoomStartTs = uiPlayerStore.timelineZoom.startTs; const zoomEndTs = uiPlayerStore.timelineZoom.endTs; @@ -109,29 +113,34 @@ function ConsolePanel({ const jump = (t: number) => player.jump(t); const { currentTab, tabStates } = store.get(); - const { - logList = [], - exceptionsList = [], - logListNow = [], - exceptionsListNow = [], - } = tabStates[currentTab] ?? {}; + const tabsArr = Object.keys(tabStates); + const tabValues = Object.values(tabStates); + const dataSource = uiPlayerStore.dataSource; + const showSingleTab = dataSource === 'current'; + const { logList = [], exceptionsList = [], logListNow = [], exceptionsListNow = [] } = React.useMemo(() => { + if (showSingleTab) { + return tabStates[currentTab] ?? {}; + } else { + const logList = tabValues.flatMap(tab => tab.logList); + const exceptionsList = tabValues.flatMap(tab => tab.exceptionsList); + const logListNow = isLive ? tabValues.flatMap(tab => tab.logListNow) : []; + const exceptionsListNow = isLive ? tabValues.flatMap(tab => tab.exceptionsListNow) : []; + return { logList, exceptionsList, logListNow, exceptionsListNow } + } + }, [currentTab, tabStates, dataSource, tabValues, isLive]) + const getTabNum = (tab: string) => (tabsArr.findIndex((t) => t === tab) + 1); - const list = isLive - ? (useMemo( - () => logListNow.concat(exceptionsListNow).sort((a, b) => a.time - b.time), - [logListNow.length, exceptionsListNow.length] - ) as ILog[]) - : (useMemo( - () => logList.concat(exceptionsList).sort((a, b) => a.time - b.time), - [logList.length, exceptionsList.length] - ).filter((l) => - zoomEnabled ? l.time >= zoomStartTs && l.time <= zoomEndTs : true - ) as ILog[]); + const list = useMemo(() => { + if (isLive) { + return logListNow.concat(exceptionsListNow).sort((a, b) => a.time - b.time) + } else { + const logs = logList.concat(exceptionsList).sort((a, b) => a.time - b.time) + return zoomEnabled ? 
logs.filter(l => l.time >= zoomStartTs && l.time <= zoomEndTs) : logs + } + }, [isLive, logList.length, exceptionsList.length, logListNow.length, exceptionsListNow.length, zoomEnabled, zoomStartTs, zoomEndTs]) let filteredList = useRegExListFilterMemo(list, (l) => l.value, filter); filteredList = useTabListFilterMemo(filteredList, (l) => LEVEL_TAB[l.level], ALL, activeTab); - React.useEffect(() => { - }, [activeTab, filter]); const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab }); const onFilterChange = ({ target: { value } }: any) => devTools.update(INDEX_KEY, { filter: value }); @@ -180,23 +189,26 @@ function ConsolePanel({ Console
- +
+ + } + /> +
{/* @ts-ignore */} {/* @ts-ignore */} - +
+ No Data
} @@ -211,6 +223,8 @@ function ConsolePanel({ iconProps={getIconProps(log.level)} renderWithNL={renderWithNL} onClick={() => showDetails(log)} + showSingleTab={showSingleTab} + getTabNum={getTabNum} /> ))} diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/MobileConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/MobileConsolePanel.tsx index 83de3bb6b..ea1aa4923 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/MobileConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/MobileConsolePanel.tsx @@ -1,10 +1,13 @@ import React, { useEffect, useRef, useState } from 'react'; import { LogLevel, ILog } from 'Player'; import BottomBlock from '../BottomBlock'; -import { Tabs, Input, Icon, NoContent } from 'UI'; +import { Tabs, Input, NoContent } from 'UI'; import cn from 'classnames'; import ConsoleRow from '../ConsoleRow'; -import { IOSPlayerContext, MobilePlayerContext } from 'App/components/Session/playerContext'; +import { + IOSPlayerContext, + MobilePlayerContext, +} from 'App/components/Session/playerContext'; import { observer } from 'mobx-react-lite'; import { VList, VListHandle } from 'virtua'; import { useStore } from 'App/mstore'; @@ -12,6 +15,7 @@ import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorD import { useModal } from 'App/components/Modal'; import useAutoscroll, { getLastItemTime } from '../useAutoscroll'; import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter'; +import { InfoCircleOutlined, SearchOutlined } from '@ant-design/icons'; const ALL = 'ALL'; const INFO = 'INFO'; @@ -26,7 +30,10 @@ const LEVEL_TAB = { [LogLevel.EXCEPTION]: ERRORS, } as const; -const TABS = [ALL, ERRORS, WARNINGS, INFO].map((tab) => ({ text: tab, key: tab })); +const TABS = [ALL, ERRORS, WARNINGS, INFO].map((tab) => ({ + text: tab, + key: tab, +})); function renderWithNL(s: string | null = '') { if (typeof s !== 'string') return ''; @@ -73,20 +80,23 @@ function MobileConsolePanel() { const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); const { showModal } = useModal(); - const { player, store } = React.useContext(MobilePlayerContext); + const { player, store } = + React.useContext(MobilePlayerContext); const jump = (t: number) => player.jump(t); - const { - logList, - logListNow, - exceptionsListNow, - } = store.get(); + const { logList, logListNow, exceptionsListNow } = store.get(); const list = logList as ILog[]; let filteredList = useRegExListFilterMemo(list, (l) => l.value, filter); - filteredList = useTabListFilterMemo(filteredList, (l) => LEVEL_TAB[l.level], ALL, activeTab); + filteredList = useTabListFilterMemo( + filteredList, + (l) => LEVEL_TAB[l.level], + ALL, + activeTab + ); - const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab }); + const onTabClick = (activeTab: any) => + devTools.update(INDEX_KEY, { activeTab }); const onFilterChange = ({ target: { value } }: any) => devTools.update(INDEX_KEY, { filter: value }); @@ -136,34 +146,35 @@ function MobileConsolePanel() {
Console - +
} />
- +
+ No Data
} size="small" show={filteredList.length === 0} > - + {filteredList.map((log, index) => ( showDetails(log)} + showSingleTab /> ))} diff --git a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx index f61e04a9c..699c5e807 100644 --- a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx @@ -2,6 +2,8 @@ import React, { useState } from 'react'; import cn from 'classnames'; import { Icon } from 'UI'; import JumpButton from 'Shared/DevTools/JumpButton'; +import { Tag } from 'antd'; +import TabTag from "../TabTag"; interface Props { log: any; @@ -10,6 +12,8 @@ interface Props { renderWithNL?: any; style?: any; onClick?: () => void; + getTabNum?: (tab: string) => number; + showSingleTab: boolean; } function ConsoleRow(props: Props) { const { log, iconProps, jump, renderWithNL, style } = props; @@ -41,11 +45,13 @@ function ConsoleRow(props: Props) { const titleLine = lines[0]; const restLines = lines.slice(1); + const logSource = props.showSingleTab ? -1 : props.getTabNum?.(log.tabId); + const logTabId = log.tabId return (
(!!log.errorId ? props.onClick?.() : toggleExpand()) : undefined} > -
- -
+ {logSource !== -1 && } +
-
+
{canExpand && ( diff --git a/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx b/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx index adfb7c979..2b1a703f1 100644 --- a/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx +++ b/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx @@ -1,6 +1,8 @@ import React from 'react'; -import { Icon, Tooltip } from 'UI'; -import { shortDurationFromMs } from "App/date"; +import { Tooltip } from 'UI'; +import { CaretRightOutlined } from '@ant-design/icons'; +import { Button } from 'antd'; +import { shortDurationFromMs } from 'App/date'; interface Props { onClick: any; @@ -12,19 +14,24 @@ function JumpButton(props: Props) { return (
-
{ e.stopPropagation(); props.onClick(); }} + icon={} > - - JUMP -
- {props.time ?
- {shortDurationFromMs(props.time)} -
: null} + JUMP + + {props.time ? ( +
+ {shortDurationFromMs(props.time)} +
+ ) : null}
); diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index bb00633e6..2ad92eb8a 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -1,7 +1,6 @@ import { ResourceType, Timed } from 'Player'; import MobilePlayer from 'Player/mobile/IOSPlayer'; import WebPlayer from 'Player/web/WebPlayer'; -import { Duration } from 'luxon'; import { observer } from 'mobx-react-lite'; import React, { useMemo, useState } from 'react'; @@ -13,17 +12,19 @@ import { import { formatMs } from 'App/date'; import { useStore } from 'App/mstore'; import { formatBytes } from 'App/utils'; -import { Icon, Input, NoContent, Tabs, Toggler, Tooltip } from 'UI'; +import { Icon, NoContent, Tabs } from 'UI'; +import { Tooltip, Input, Switch, Form } from 'antd'; +import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons'; import FetchDetailsModal from 'Shared/FetchDetailsModal'; -import { WsChannel } from "App/player/web/messages"; +import { WsChannel } from 'App/player/web/messages'; import BottomBlock from '../BottomBlock'; import InfoLine from '../BottomBlock/InfoLine'; +import TabSelector from '../TabSelector'; import TimeTable from '../TimeTable'; import useAutoscroll, { getLastItemTime } from '../useAutoscroll'; import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter'; -import WSModal from './WSModal'; import WSPanel from './WSPanel'; const INDEX_KEY = 'network'; @@ -57,12 +58,6 @@ export const NETWORK_TABS = TAP_KEYS.map((tab) => ({ const DOM_LOADED_TIME_COLOR = 'teal'; const LOAD_TIME_COLOR = 'red'; -function compare(a: any, b: any, key: string) { - if (a[key] > b[key]) return 1; - if (a[key] < b[key]) return -1; - return 0; -} - export function renderType(r: any) { return ( {r.type}
}> @@ -79,14 +74,6 @@ export function renderName(r: any) { ); } -export function renderStart(r: any) { - return ( -
- {Duration.fromMillis(r.time).toFormat('mm:ss.SSS')} -
- ); -} - function renderSize(r: any) { if (r.responseBodySize) return formatBytes(r.responseBodySize); let triggerText; @@ -125,13 +112,10 @@ export function renderDuration(r: any) { if (!r.isRed && !r.isYellow) return text; let tooltipText; - let className = 'w-full h-full flex items-center '; if (r.isYellow) { tooltipText = 'Slower than average'; - className += 'warn color-orange'; } else { tooltipText = 'Much slower than average'; - className += 'error color-red'; } return ( @@ -151,7 +135,7 @@ function renderStatus({ error?: string; }) { const displayedStatus = error ? ( - +
{cached ? ( - +
{displayedStatus} @@ -178,13 +162,10 @@ function renderStatus({ ); } -function NetworkPanelCont({ - panelHeight, -}: { - panelHeight: number; -}) { +function NetworkPanelCont({ panelHeight }: { panelHeight: number }) { const { player, store } = React.useContext(PlayerContext); - const { sessionStore } = useStore(); + const { sessionStore, uiPlayerStore } = useStore(); + const startedAt = sessionStore.current.startedAt; const { domContentLoadedTime, @@ -192,7 +173,12 @@ function NetworkPanelCont({ domBuildingTime, tabStates, currentTab, + tabNames, } = store.get(); + const tabsArr = Object.keys(tabStates); + const tabValues = Object.values(tabStates); + const dataSource = uiPlayerStore.dataSource; + const showSingleTab = dataSource === 'current'; const { fetchList = [], resourceList = [], @@ -200,8 +186,34 @@ function NetworkPanelCont({ resourceListNow = [], websocketList = [], websocketListNow = [], - } = tabStates[currentTab]; - + } = React.useMemo(() => { + if (showSingleTab) { + return tabStates[currentTab] ?? {}; + } else { + const fetchList = tabValues.flatMap((tab) => tab.fetchList); + const resourceList = tabValues.flatMap((tab) => tab.resourceList); + const fetchListNow = tabValues + .flatMap((tab) => tab.fetchListNow) + .filter(Boolean); + const resourceListNow = tabValues + .flatMap((tab) => tab.resourceListNow) + .filter(Boolean); + const websocketList = tabValues.flatMap((tab) => tab.websocketList); + const websocketListNow = tabValues + .flatMap((tab) => tab.websocketListNow) + .filter(Boolean); + return { + fetchList, + resourceList, + fetchListNow, + resourceListNow, + websocketList, + websocketListNow, + }; + } + }, [currentTab, tabStates, dataSource, tabValues]); + const getTabNum = (tab: string) => tabsArr.findIndex((t) => t === tab) + 1; + const getTabName = (tabId: string) => tabNames[tabId] return ( ); } -function MobileNetworkPanelCont({ - panelHeight, -}: { - panelHeight: number; -}) { +function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) { const { player, store } = React.useContext(MobilePlayerContext); const { uiPlayerStore, sessionStore } = useStore(); const startedAt = sessionStore.current.startedAt; @@ -301,6 +312,9 @@ interface Props { onClose?: () => void; activeOutsideIndex?: number; isSpot?: boolean; + getTabNum?: (tab: string) => number; + getTabName?: (tabId: string) => string; + showSingleTab?: boolean; } export const NetworkPanelComp = observer( @@ -323,8 +337,13 @@ export const NetworkPanelComp = observer( onClose, activeOutsideIndex, isSpot, + getTabNum, + showSingleTab, + getTabName, }: Props) => { - const [selectedWsChannel, setSelectedWsChannel] = React.useState(null) + const [selectedWsChannel, setSelectedWsChannel] = React.useState< + WsChannel[] | null + >(null); const { showModal } = useModal(); const [showOnlyErrors, setShowOnlyErrors] = useState(false); @@ -480,10 +499,10 @@ export const NetworkPanelComp = observer( const showDetailsModal = (item: any) => { if (item.type === 'websocket') { const socketMsgList = websocketList.filter( - (ws) => ws.channelName === item.channelName - ); + (ws) => ws.channelName === item.channelName + ); - return setSelectedWsChannel(socketMsgList) + return setSelectedWsChannel(socketMsgList); } setIsDetailsModalActive(true); showModal( @@ -507,6 +526,61 @@ export const NetworkPanelComp = observer( stopAutoscroll(); }; + const tableCols = React.useMemo(() => { + const cols: any[] = [ + { + label: 'Status', + dataKey: 'status', + width: 90, + render: renderStatus, + }, + { + label: 'Type', + 
dataKey: 'type', + width: 90, + render: renderType, + }, + { + label: 'Method', + width: 80, + dataKey: 'method', + }, + { + label: 'Name', + width: 240, + dataKey: 'name', + render: renderName, + }, + { + label: 'Size', + width: 80, + dataKey: 'decodedBodySize', + render: renderSize, + hidden: activeTab === XHR, + }, + { + label: 'Duration', + width: 80, + dataKey: 'duration', + render: renderDuration, + }, + ]; + if (!showSingleTab && !isSpot) { + cols.unshift({ + label: 'Source', + width: 64, + render: (r: Record) => ( + +
+ {getTabNum?.(r.tabId) ?? 0} +
+
+ ), + }); + } + return cols; + }, [showSingleTab]); + return ( )}
- +
+ {!isMobile && !isSpot ? : null} + } + /> +
- setShowOnlyErrors(!showOnlyErrors)} - label="4xx-5xx Only" - /> + + +
- +
+ No Data
} @@ -613,52 +700,13 @@ export const NetworkPanelComp = observer( }} activeIndex={activeIndex} > - {[ - // { - // label: 'Start', - // width: 120, - // render: renderStart, - // }, - { - label: 'Status', - dataKey: 'status', - width: 90, - render: renderStatus, - }, - { - label: 'Type', - dataKey: 'type', - width: 90, - render: renderType, - }, - { - label: 'Method', - width: 80, - dataKey: 'method', - }, - { - label: 'Name', - width: 240, - dataKey: 'name', - render: renderName, - }, - { - label: 'Size', - width: 80, - dataKey: 'decodedBodySize', - render: renderSize, - hidden: activeTab === XHR, - }, - { - label: 'Duration', - width: 80, - dataKey: 'duration', - render: renderDuration, - }, - ]} + {tableCols} {selectedWsChannel ? ( - setSelectedWsChannel(null)} /> + setSelectedWsChannel(null)} + /> ) : null}
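
Note for reviewers: the NetworkPanelCont hunk above merges the per-tab devtools lists whenever uiPlayerStore.dataSource is set to 'all', and falls back to the current tab's state for 'current'. Below is a minimal standalone sketch of that merge in plain TypeScript. The field names and the 'all' | 'current' union are taken from the diff; the TabState interface, the mergeTabStates helper, and this getTabNum signature are illustrative names invented for the example (the real component keeps the logic inline in a React.useMemo), so treat it as a sketch, not the implementation.

// Illustrative sketch only -- not part of the diff.
type DataSource = 'all' | 'current';

// Assumed shape of one tabStates entry, based on the fields the diff reads.
interface TabState {
  fetchList: unknown[];
  resourceList: unknown[];
  fetchListNow: unknown[];
  resourceListNow: unknown[];
  websocketList: unknown[];
  websocketListNow: unknown[];
}

// Mirrors the useMemo in NetworkPanelCont: 'current' returns the active tab's
// lists (or an empty object if the tab is missing); 'all' concatenates every
// tab's lists and drops empty "Now" entries.
function mergeTabStates(
  tabStates: Record<string, TabState>,
  currentTab: string,
  dataSource: DataSource
): Partial<TabState> {
  if (dataSource === 'current') {
    return tabStates[currentTab] ?? {};
  }
  const tabs = Object.values(tabStates);
  return {
    fetchList: tabs.flatMap((t) => t.fetchList),
    resourceList: tabs.flatMap((t) => t.resourceList),
    fetchListNow: tabs.flatMap((t) => t.fetchListNow).filter(Boolean),
    resourceListNow: tabs.flatMap((t) => t.resourceListNow).filter(Boolean),
    websocketList: tabs.flatMap((t) => t.websocketList),
    websocketListNow: tabs.flatMap((t) => t.websocketListNow).filter(Boolean),
  };
}

// The new "Source" table column resolves a request's tabId to a 1-based tab
// number, mirroring getTabNum in the diff.
function getTabNum(tabStates: Record<string, TabState>, tabId: string): number {
  return Object.keys(tabStates).findIndex((t) => t === tabId) + 1;
}

The "Now" lists are filtered with Boolean because a tab that has not yet produced a current entry contributes an empty value, which would otherwise leak into the merged list.
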
diff --git a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx index 1ee227ae1..86bd000ec 100644 --- a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx +++ b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx @@ -1,8 +1,13 @@ import { Timed } from 'Player'; import React, { useEffect, useMemo, useState } from 'react'; import { observer } from 'mobx-react-lite'; -import { Tabs, Input, NoContent, Icon } from 'UI'; -import { PlayerContext, MobilePlayerContext } from 'App/components/Session/playerContext'; +import { Tabs, NoContent, Icon } from 'UI'; +import { Input } from 'antd'; +import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons'; +import { + PlayerContext, + MobilePlayerContext, +} from 'App/components/Session/playerContext'; import BottomBlock from '../BottomBlock'; import { useModal } from 'App/components/Modal'; import { useStore } from 'App/mstore'; @@ -10,6 +15,7 @@ import { typeList } from 'Types/session/stackEvent'; import StackEventRow from 'Shared/DevTools/StackEventRow'; import StackEventModal from '../StackEventModal'; +import { Segmented, Tooltip } from 'antd'; import useAutoscroll, { getLastItemTime } from '../useAutoscroll'; import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter'; import { VList, VListHandle } from 'virtua'; @@ -24,198 +30,247 @@ const ALL = 'ALL'; const TAB_KEYS = [ALL, ...typeList] as const; const TABS = TAB_KEYS.map((tab) => ({ text: tab, key: tab })); -type EventsList = Array; +type EventsList = Array< + Timed & { name: string; source: string; key: string; payload?: string[] } +>; -const WebStackEventPanelComp = observer( - () => { - const { uiPlayerStore } = useStore(); - const zoomEnabled = uiPlayerStore.timelineZoom.enabled; - const zoomStartTs = uiPlayerStore.timelineZoom.startTs; - const zoomEndTs = uiPlayerStore.timelineZoom.endTs; - const { player, store } = React.useContext(PlayerContext); - const jump = (t: number) => player.jump(t); - const { currentTab, tabStates } = store.get(); +const WebStackEventPanelComp = observer(() => { + const { uiPlayerStore } = useStore(); + const zoomEnabled = uiPlayerStore.timelineZoom.enabled; + const zoomStartTs = uiPlayerStore.timelineZoom.startTs; + const zoomEndTs = uiPlayerStore.timelineZoom.endTs; + const { player, store } = React.useContext(PlayerContext); + const jump = (t: number) => player.jump(t); + const { currentTab, tabStates } = store.get(); - const { stackList: list = [], stackListNow: listNow = [] } = tabStates[currentTab]; + const { stackList: list = [], stackListNow: listNow = [] } = + tabStates[currentTab]; - return ( - - ); - } -); + return ( + + ); +}); export const WebStackEventPanel = WebStackEventPanelComp; -const MobileStackEventPanelComp = observer( - () => { - const { uiPlayerStore } = useStore(); - const zoomEnabled = uiPlayerStore.timelineZoom.enabled; - const zoomStartTs = uiPlayerStore.timelineZoom.startTs; - const zoomEndTs = uiPlayerStore.timelineZoom.endTs; - const { player, store } = React.useContext(MobilePlayerContext); - const jump = (t: number) => player.jump(t); - const { eventList: list = [], eventListNow: listNow = [] } = store.get(); +const MobileStackEventPanelComp = observer(() => { + const { uiPlayerStore } = useStore(); + const zoomEnabled = uiPlayerStore.timelineZoom.enabled; + const zoomStartTs = uiPlayerStore.timelineZoom.startTs; + const zoomEndTs = 
uiPlayerStore.timelineZoom.endTs; + const { player, store } = React.useContext(MobilePlayerContext); + const jump = (t: number) => player.jump(t); + const { eventList: list = [], eventListNow: listNow = [] } = store.get(); - return ( - - ); - } -); + return ( + + ); +}); export const MobileStackEventPanel = MobileStackEventPanelComp; -const EventsPanel = observer(({ - list, - listNow, - jump, - zoomEnabled, - zoomStartTs, - zoomEndTs, -}: { - list: EventsList; - listNow: EventsList; - jump: (t: number) => void; - zoomEnabled: boolean; - zoomStartTs: number; - zoomEndTs: number; -}) => { - const { - sessionStore: { devTools }, - } = useStore(); - const { showModal } = useModal(); - const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); // TODO:embed that into useModal - const filter = devTools[INDEX_KEY].filter; - const activeTab = devTools[INDEX_KEY].activeTab; - const activeIndex = devTools[INDEX_KEY].index; +const EventsPanel = observer( + ({ + list, + listNow, + jump, + zoomEnabled, + zoomStartTs, + zoomEndTs, + isMobile, + }: { + list: EventsList; + listNow: EventsList; + jump: (t: number) => void; + zoomEnabled: boolean; + zoomStartTs: number; + zoomEndTs: number; + isMobile?: boolean; + }) => { + const { + sessionStore: { devTools }, + } = useStore(); + const { showModal } = useModal(); + const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); // TODO:embed that into useModal + const filter = devTools[INDEX_KEY].filter; + const activeTab = devTools[INDEX_KEY].activeTab; + const activeIndex = devTools[INDEX_KEY].index; - const inZoomRangeList = list.filter(({ time }) => - zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true - ); - const inZoomRangeListNow = listNow.filter(({ time }) => - zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true - ); + const inZoomRangeList = list.filter(({ time }) => + zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true + ); + const inZoomRangeListNow = listNow.filter(({ time }) => + zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true + ); - let filteredList = useRegExListFilterMemo(inZoomRangeList, (it) => { - const searchBy = [it.name] - if (it.payload) { - const payload = Array.isArray(it.payload) ? it.payload.join(',') : JSON.stringify(it.payload); - searchBy.push(payload); - } - return searchBy - }, filter); - filteredList = useTabListFilterMemo(filteredList, (it) => it.source, ALL, activeTab); - - const onTabClick = (activeTab: (typeof TAB_KEYS)[number]) => - devTools.update(INDEX_KEY, { activeTab }); - const onFilterChange = ({ target: { value } }: React.ChangeEvent) => devTools.update(INDEX_KEY, { filter: value }); - const tabs = useMemo( - () => TABS.filter(({ key }) => key === ALL || inZoomRangeList.some(({ source }) => key === source)), - [inZoomRangeList.length] - ); - - const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll( - filteredList, - getLastItemTime(inZoomRangeListNow), - activeIndex, - (index) => devTools.update(INDEX_KEY, { index }) - ); - const onMouseEnter = stopAutoscroll; - const onMouseLeave = () => { - if (isDetailsModalActive) { - return; - } - timeoutStartAutoscroll(); - }; - - const showDetails = (item: any) => { - setIsDetailsModalActive(true); - showModal(, { - right: true, - width: 500, - onClose: () => { - setIsDetailsModalActive(false); - timeoutStartAutoscroll(); + let filteredList = useRegExListFilterMemo( + inZoomRangeList, + (it) => { + const searchBy = [it.name]; + if (it.payload) { + const payload = Array.isArray(it.payload) + ? 
it.payload.join(',') + : JSON.stringify(it.payload); + searchBy.push(payload); + } + return searchBy; }, - }); - devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) }); - stopAutoscroll(); - }; + filter + ); + filteredList = useTabListFilterMemo( + filteredList, + (it) => it.source, + ALL, + activeTab + ); - const _list = React.useRef(null); - useEffect(() => { - if (_list.current) { - _list.current.scrollToIndex(activeIndex); - } - }, [activeIndex]); + const onTabClick = (activeTab: (typeof TAB_KEYS)[number]) => + devTools.update(INDEX_KEY, { activeTab }); + const onFilterChange = ({ + target: { value }, + }: React.ChangeEvent) => + devTools.update(INDEX_KEY, { filter: value }); + const tabs = useMemo( + () => + TABS.filter( + ({ key }) => + key === ALL || inZoomRangeList.some(({ source }) => key === source) + ), + [inZoomRangeList.length] + ); - return ( - - -
- Stack Events - -
- -
- - - - No Data -
- } - size="small" - show={filteredList.length === 0} - > - - {filteredList.map((item, index) => ( - { - stopAutoscroll(); - devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) }); - jump(item.time); - }} - onClick={() => showDetails(item)} + const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll( + filteredList, + getLastItemTime(inZoomRangeListNow), + activeIndex, + (index) => devTools.update(INDEX_KEY, { index }) + ); + const onMouseEnter = stopAutoscroll; + const onMouseLeave = () => { + if (isDetailsModalActive) { + return; + } + timeoutStartAutoscroll(); + }; + + const showDetails = (item: any) => { + setIsDetailsModalActive(true); + showModal(, { + right: true, + width: 500, + onClose: () => { + setIsDetailsModalActive(false); + timeoutStartAutoscroll(); + }, + }); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) }); + stopAutoscroll(); + }; + + const _list = React.useRef(null); + useEffect(() => { + if (_list.current) { + _list.current.scrollToIndex(activeIndex); + } + }, [activeIndex]); + + return ( + + +
+ + Stack Events + + +
+
+ {isMobile ? null : ( + + Current Tab + + ), + value: 'current', + disabled: true, + }, + ]} + defaultValue="all" + size="small" + className="rounded-full font-medium" /> - ))} - - - - - ); -}); + )} + } + /> +
+
+ + + + No Data +
+ } + size="small" + show={filteredList.length === 0} + > + + {filteredList.map((item, index) => ( + { + stopAutoscroll(); + devTools.update(INDEX_KEY, { + index: filteredList.indexOf(item), + }); + jump(item.time); + }} + onClick={() => showDetails(item)} + /> + ))} + + + + + ); + } +); diff --git a/frontend/app/components/shared/DevTools/TabSelector.tsx b/frontend/app/components/shared/DevTools/TabSelector.tsx new file mode 100644 index 000000000..293faf117 --- /dev/null +++ b/frontend/app/components/shared/DevTools/TabSelector.tsx @@ -0,0 +1,22 @@ +import React from 'react' +import { Segmented } from 'antd' +import { useStore } from 'App/mstore'; +import { observer } from 'mobx-react-lite'; + +function TabSelector() { + const { uiPlayerStore } = useStore(); + const currentValue = uiPlayerStore.dataSource; + const options = [ + { label: 'All Tabs', value: 'all' }, + { label: 'Current Tab', value: 'current' } + ] + + const onChange = (value: 'all' | 'current') => { + uiPlayerStore.changeDataSource(value) + } + return ( + + ) +} + +export default observer(TabSelector) \ No newline at end of file diff --git a/frontend/app/components/shared/DevTools/TabTag.tsx b/frontend/app/components/shared/DevTools/TabTag.tsx new file mode 100644 index 000000000..4478cfb6f --- /dev/null +++ b/frontend/app/components/shared/DevTools/TabTag.tsx @@ -0,0 +1,23 @@ +import React from 'react'; +import { Tooltip } from 'antd'; +import { observer } from 'mobx-react-lite'; +import { PlayerContext } from 'Components/Session/playerContext'; + +function TabTag({ logSource, logTabId }: { logSource: number; logTabId: string }) { + const { store } = React.useContext(PlayerContext); + const { tabNames } = store.get(); + + return ( + +
+ {logSource} +
+
+ ); +} + +export default observer(TabTag); diff --git a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx index 90eabe26e..5961ba599 100644 --- a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx +++ b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx @@ -199,7 +199,7 @@ export default class TimeTable extends React.PureComponent { return (
{ {columns .filter((i: any) => !i.hidden) .map(({ dataKey, render, width, label }) => ( -
+
{render ? render(row) : row[dataKey || ''] || {'empty'}} diff --git a/frontend/app/components/shared/Insights/SankeyChart/NodeDropdown.tsx b/frontend/app/components/shared/Insights/SankeyChart/NodeDropdown.tsx index 0efb8fb4f..9149486e2 100644 --- a/frontend/app/components/shared/Insights/SankeyChart/NodeDropdown.tsx +++ b/frontend/app/components/shared/Insights/SankeyChart/NodeDropdown.tsx @@ -1,23 +1,11 @@ import React from 'react'; -// import Select from 'Shared/Select'; -import { Dropdown, MenuProps, Select, Space } from 'antd'; -import { DownOutlined, SmileOutlined } from '@ant-design/icons'; +import { MenuProps, Select } from 'antd'; interface Props { payload: any; } function NodeDropdown(props: Props) { - const items: MenuProps['items'] = [ - { - key: '1', - label: ( - - 1st menu item - - ) - } - ]; return (