diff --git a/api/.env.default b/api/.env.default index 6ae959a7d..7dd248bec 100644 --- a/api/.env.default +++ b/api/.env.default @@ -28,8 +28,8 @@ jwt_algorithm=HS512 jwt_exp_delta_seconds=2592000 jwt_issuer=openreplay-default-foss jwt_secret="SET A RANDOM STRING HERE" -peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list -peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live +assist=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live +assistList=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list pg_dbname=postgres pg_host=postgresql.db.svc.cluster.local pg_password=asayerPostgres @@ -37,11 +37,13 @@ pg_port=5432 pg_user=postgres pg_timeout=30 pg_minconn=45 +PG_RETRY_MAX=50 +PG_RETRY_INTERVAL=2 put_S3_TTL=20 sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps +sourcemaps_reader=http://127.0.0.1:9000/ stage=default-foss version_number=1.4.0 \ No newline at end of file diff --git a/api/Dockerfile b/api/Dockerfile index 780518ff3..0673ab2b5 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -5,6 +5,15 @@ WORKDIR /work COPY . . RUN pip install -r requirements.txt RUN mv .env.default .env +ENV APP_NAME chalice +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* && \ + cd sourcemap-reader && \ + npm install # Add Tini # Startup daemon diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts index ed8f06eac..76e8c262a 100644 --- a/api/Dockerfile.alerts +++ b/api/Dockerfile.alerts @@ -4,8 +4,9 @@ LABEL Maintainer="KRAIEM Taha Yassine" WORKDIR /work COPY . . 
RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py +RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh ENV pg_minconn 2 +ENV APP_NAME alerts # Add Tini # Startup daemon diff --git a/api/app.py b/api/app.py index d261dadac..959f1ef8f 100644 --- a/api/app.py +++ b/api/app.py @@ -9,12 +9,11 @@ from starlette.responses import StreamingResponse from chalicelib.utils import helper from chalicelib.utils import pg_client from routers import core, core_dynamic -from routers.app import v1_api from routers.crons import core_crons from routers.crons import core_dynamic_crons -from routers.subs import dashboard +from routers.subs import dashboard, insights, metrics, v1_api -app = FastAPI() +app = FastAPI(root_path="/api") @app.middleware('http') @@ -54,7 +53,8 @@ app.include_router(core_dynamic.public_app) app.include_router(core_dynamic.app) app.include_router(core_dynamic.app_apikey) app.include_router(dashboard.app) -# app.include_router(insights.app) +app.include_router(metrics.app) +app.include_router(insights.app) app.include_router(v1_api.app_apikey) Schedule = AsyncIOScheduler() diff --git a/api/auth/auth_project.py b/api/auth/auth_project.py new file mode 100644 index 000000000..98a495bbb --- /dev/null +++ b/api/auth/auth_project.py @@ -0,0 +1,24 @@ +from fastapi import Request +from starlette import status +from starlette.exceptions import HTTPException + +import schemas +from chalicelib.core import projects +from or_dependencies import OR_context + + +class ProjectAuthorizer: + def __init__(self, project_identifier): + self.project_identifier: str = project_identifier + + async def __call__(self, request: Request) -> None: + if len(request.path_params.keys()) == 0 or request.path_params.get(self.project_identifier) is None: + return + current_user: schemas.CurrentContext = await OR_context(request) + project_identifier = request.path_params[self.project_identifier] + if (self.project_identifier == "projectId" \ + and projects.get_project(project_id=project_identifier, tenant_id=current_user.tenant_id) is None) \ + or (self.project_identifier.lower() == "projectKey" \ + and projects.get_internal_project_id(project_key=project_identifier) is None): + print("project not found") + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="project not found.") diff --git a/api/build.sh b/api/build.sh index 29b8911ca..cec7525f5 100644 --- a/api/build.sh +++ b/api/build.sh @@ -18,6 +18,8 @@ check_prereq() { } function build_api(){ + cp -R ../utilities/utils ../sourcemap-reader/. + cp -R ../sourcemap-reader . 
tag="" # Copy enterprise code [[ $1 == "ee" ]] && { diff --git a/api/build_alerts.sh b/api/build_alerts.sh index 51504a276..f333c8dc8 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -32,7 +32,7 @@ function make_submodule() { cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/ # -- end of generated part } - cp -R ./{Dockerfile.alerts,requirements.txt,.env.default,entrypoint.sh} ./alerts/ + cp -R ./{Dockerfile.alerts,requirements.txt,.env.default,entrypoint_alerts.sh} ./alerts/ cp -R ./chalicelib/utils/html ./alerts/chalicelib/utils/html } diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index c6fb35713..b2926fd0c 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,9 +1,7 @@ import requests from decouple import config -import schemas -from chalicelib.core import projects, sessions -from chalicelib.utils import pg_client, helper +from chalicelib.core import projects SESSION_PROJECTION_COLS = """s.project_id, s.session_id::text AS session_id, @@ -21,61 +19,22 @@ SESSION_PROJECTION_COLS = """s.project_id, """ -def get_live_sessions(project_id, filters=None): - project_key = projects.get_project_key(project_id) - connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") - if connected_peers.status_code != 200: - print("!! issue with the peer-server") - print(connected_peers.text) - return [] - connected_peers = connected_peers.json().get("data", []) - - if len(connected_peers) == 0: - return [] - connected_peers = tuple(connected_peers) - extra_constraints = ["project_id = %(project_id)s", "session_id IN %(connected_peers)s"] - extra_params = {} - if filters is not None: - for i, f in enumerate(filters): - if not isinstance(f.get("value"), list): - f["value"] = [f.get("value")] - if len(f["value"]) == 0 or f["value"][0] is None: - continue - filter_type = f["type"].upper() - f["value"] = sessions.__get_sql_value_multiple(f["value"]) - if filter_type == schemas.FilterType.user_id: - op = sessions.__get_sql_operator(f["operator"]) - extra_constraints.append(f"user_id {op} %(value_{i})s") - extra_params[f"value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op) - - with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""\ - SELECT {SESSION_PROJECTION_COLS}, %(project_key)s||'-'|| session_id AS peer_id - FROM public.sessions AS s - WHERE {" AND ".join(extra_constraints)} - ORDER BY start_ts DESC - LIMIT 500;""", - {"project_id": project_id, - "connected_peers": connected_peers, - "project_key": project_key, - **extra_params}) - cur.execute(query) - results = cur.fetchall() - return helper.list_to_camel_case(results) - - def get_live_sessions_ws(project_id, user_id=None): project_key = projects.get_project_key(project_id) params = {} if user_id and len(user_id) > 0: params["userId"] = user_id try: - connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params) + connected_peers = requests.get(config("assist") % config("S3_KEY") + f"/{project_key}", params, + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) return [] live_peers = connected_peers.json().get("data", []) + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + live_peers = [] except Exception as e: print("issue getting Live-Assist response") print(str(e)) @@ -105,12 +64,16 @@ def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) try: - connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}") + connected_peers = requests.get(config("assistList") % config("S3_KEY") + f"/{project_key}", + timeout=config("assistTimeout", cast=int, default=5)) if connected_peers.status_code != 200: print("!! issue with the peer-server") print(connected_peers.text) return False connected_peers = connected_peers.json().get("data", []) + except requests.exceptions.Timeout: + print("Timeout getting Assist response") + return False except Exception as e: print("issue getting Assist response") print(str(e)) diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index 33a859cc8..5756e82ab 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -42,7 +42,7 @@ def generate_jwt(id, tenant_id, iat, aud): payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + config("jwt_exp_delta_seconds",cast=int) + TimeUTC.get_utc_offset() // 1000, + "exp": iat // 1000 + config("jwt_exp_delta_seconds", cast=int) + TimeUTC.get_utc_offset() // 1000, "iss": config("jwt_issuer"), "iat": iat // 1000, "aud": aud diff --git a/api/chalicelib/core/boarding.py b/api/chalicelib/core/boarding.py index c303643c8..68843b2f8 100644 --- a/api/chalicelib/core/boarding.py +++ b/api/chalicelib/core/boarding.py @@ -5,39 +5,38 @@ from chalicelib.core import users def get_state(tenant_id): - my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - pids = [s["projectId"] for s in my_projects] + pids = projects.get_projects_ids(tenant_id=tenant_id) with pg_client.PostgresClient() as cur: recorded = False meta = False if len(pids) > 0: cur.execute( - cur.mogrify("""\ - SELECT - COUNT(*) - FROM public.sessions AS s - where s.project_id IN %(ids)s - LIMIT 1;""", + cur.mogrify("""SELECT EXISTS(( SELECT 1 + FROM public.sessions AS s + WHERE s.project_id IN %(ids)s)) AS exists;""", {"ids": tuple(pids)}) ) - recorded = cur.fetchone()["count"] > 0 + recorded = cur.fetchone()["exists"] meta = False if recorded: - cur.execute("""SELECT SUM((SELECT COUNT(t.meta) - FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5), - (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10), - (sessions.user_id)) AS t(meta) - WHERE t.meta NOTNULL)) - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 'defined' - FROM public.sessions - WHERE sessions.project_id=p.project_id AND sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON(TRUE) - WHERE p.deleted_at ISNULL;""" - ) + cur.execute("""SELECT EXISTS((SELECT 1 + FROM public.projects AS p + LEFT JOIN LATERAL ( SELECT 1 + FROM public.sessions + WHERE sessions.project_id = p.project_id + AND sessions.user_id IS NOT NULL + LIMIT 1) AS sessions(user_id) ON (TRUE) + WHERE p.deleted_at ISNULL + AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL + OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL + OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL + OR p.metadata_6 IS NOT NULL OR 
p.metadata_7 IS NOT NULL + OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL + OR p.metadata_10 IS NOT NULL ) + )) AS exists;""") - meta = cur.fetchone()["sum"] > 0 + meta = cur.fetchone()["exists"] return [ {"task": "Install OpenReplay", @@ -58,22 +57,18 @@ def get_state(tenant_id): def get_state_installing(tenant_id): - my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - pids = [s["projectId"] for s in my_projects] + pids = projects.get_projects_ids(tenant_id=tenant_id) with pg_client.PostgresClient() as cur: recorded = False if len(pids) > 0: cur.execute( - cur.mogrify("""\ - SELECT - COUNT(*) - FROM public.sessions AS s - where s.project_id IN %(ids)s - LIMIT 1;""", + cur.mogrify("""SELECT EXISTS(( SELECT 1 + FROM public.sessions AS s + WHERE s.project_id IN %(ids)s)) AS exists;""", {"ids": tuple(pids)}) ) - recorded = cur.fetchone()["count"] > 0 + recorded = cur.fetchone()["exists"] return {"task": "Install OpenReplay", "done": recorded, @@ -82,20 +77,23 @@ def get_state_installing(tenant_id): def get_state_identify_users(tenant_id): with pg_client.PostgresClient() as cur: - cur.execute( - """SELECT SUM((SELECT COUNT(t.meta) - FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5), - (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10), - (sessions.user_id)) AS t(meta) - WHERE t.meta NOTNULL)) - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 'defined' - FROM public.sessions - WHERE sessions.project_id=p.project_id AND sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON(TRUE) - WHERE p.deleted_at ISNULL;""") + cur.execute("""SELECT EXISTS((SELECT 1 + FROM public.projects AS p + LEFT JOIN LATERAL ( SELECT 1 + FROM public.sessions + WHERE sessions.project_id = p.project_id + AND sessions.user_id IS NOT NULL + LIMIT 1) AS sessions(user_id) ON (TRUE) + WHERE p.deleted_at ISNULL + AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL + OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL + OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL + OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL + OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL + OR p.metadata_10 IS NOT NULL ) + )) AS exists;""") - meta = cur.fetchone()["sum"] > 0 + meta = cur.fetchone()["exists"] return {"task": "Identify Users", "done": meta, diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index e0b0ed432..3e7fc100a 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -9,11 +9,11 @@ from chalicelib.utils.TimeUTC import TimeUTC PIE_CHART_GROUP = 5 -def __try_live(project_id, data: schemas.CreateCustomMetricsSchema): +def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): results = [] for i, s in enumerate(data.series): - s.filter.startDate = data.startDate - s.filter.endDate = data.endDate + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value)) @@ -42,7 +42,7 @@ def __try_live(project_id, data: schemas.CreateCustomMetricsSchema): return results -def merged_live(project_id, data: schemas.CreateCustomMetricsSchema): +def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema): series_charts = __try_live(project_id=project_id, 
data=data) if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: return series_charts @@ -54,13 +54,11 @@ def merged_live(project_id, data: schemas.CreateCustomMetricsSchema): return results -def __get_merged_metric(project_id, user_id, metric_id, - data: Union[schemas.CustomMetricChartPayloadSchema, - schemas.CustomMetricSessionsPayloadSchema]) \ +def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema, + schemas.CustomMetricSessionsPayloadSchema]) \ -> Union[schemas.CreateCustomMetricsSchema, None]: - metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) - if metric is None: - return None + if data.series is not None and len(data.series) > 0: + metric["series"] = data.series metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric}) if len(data.filters) > 0 or len(data.events) > 0: for s in metric.series: @@ -71,11 +69,12 @@ def __get_merged_metric(project_id, user_id, metric_id, return metric -def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema): - metric: schemas.CreateCustomMetricsSchema = __get_merged_metric(project_id=project_id, user_id=user_id, - metric_id=metric_id, data=data) +def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None): + if metric is None: + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if metric is None: return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) series_charts = __try_live(project_id=project_id, data=metric) if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table: return series_charts @@ -88,21 +87,40 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema): - metric: schemas.CreateCustomMetricsSchema = __get_merged_metric(project_id=project_id, user_id=user_id, - metric_id=metric_id, data=data) + metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) + if metric is None: + return None + metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None results = [] for s in metric.series: - s.filter.startDate = data.startDate - s.filter.endDate = data.endDate + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page results.append({"seriesId": s.series_id, "seriesName": s.name, **sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)}) return results -def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema): +def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema): + results = [] + if data.series is None: + return results + for s in data.series: + s.filter.startDate = data.startTimestamp + s.filter.endDate = data.endTimestamp + s.filter.limit = data.limit + s.filter.page = data.page + results.append({"seriesId": None, "seriesName": s.name, + **sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)}) + + return results + + +def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False): with pg_client.PostgresClient() as cur: 
_data = {} for i, s in enumerate(data.series): @@ -129,6 +147,8 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema): query ) r = cur.fetchone() + if dashboard: + return r["metric_id"] return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)} @@ -147,10 +167,11 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche "metric_value": data.metric_value, "metric_format": data.metric_format} for i, s in enumerate(data.series): prefix = "u_" + if s.index is None: + s.index = i if s.series_id is None or s.series_id not in series_ids: n_series.append({"i": i, "s": s}) prefix = "n_" - s.index = i else: u_series.append({"i": i, "s": s}) u_series_ids.append(s.series_id) @@ -192,40 +213,60 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche SET name = %(name)s, is_public= %(is_public)s, view_type= %(view_type)s, metric_type= %(metric_type)s, metric_of= %(metric_of)s, metric_value= %(metric_value)s, - metric_format= %(metric_format)s + metric_format= %(metric_format)s, + edited_at = timezone('utc'::text, now()) WHERE metric_id = %(metric_id)s AND project_id = %(project_id)s AND (user_id = %(user_id)s OR is_public) RETURNING metric_id;""", params) - cur.execute( - query - ) + cur.execute(query) return get(metric_id=metric_id, project_id=project_id, user_id=user_id) -def get_all(project_id, user_id): +def get_all(project_id, user_id, include_series=False): with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """SELECT * - FROM metrics - LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series + sub_join = "" + if include_series: + sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series FROM metric_series WHERE metric_series.metric_id = metrics.metric_id AND metric_series.deleted_at ISNULL - ) AS metric_series ON (TRUE) + ) AS metric_series ON (TRUE)""" + cur.execute( + cur.mogrify( + f"""SELECT * + FROM metrics + {sub_join} + LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards + FROM (SELECT DISTINCT dashboard_id, name, is_public + FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id) + WHERE deleted_at ISNULL + AND dashboard_widgets.metric_id = metrics.metric_id + AND project_id = %(project_id)s + AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards + ) AS connected_dashboards ON (TRUE) + LEFT JOIN LATERAL (SELECT email AS owner_email + FROM users + WHERE deleted_at ISNULL + AND users.user_id = metrics.user_id + ) AS owner ON (TRUE) WHERE metrics.project_id = %(project_id)s AND metrics.deleted_at ISNULL - AND (user_id = %(user_id)s OR is_public) - ORDER BY created_at;""", + AND (user_id = %(user_id)s OR metrics.is_public) + ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""", {"project_id": project_id, "user_id": user_id} ) ) rows = cur.fetchall() - for r in rows: - r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - for s in r["series"]: - s["filter"] = helper.old_search_payload_to_flat(s["filter"]) + if include_series: + for r in rows: + # r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) + for s in r["series"]: + s["filter"] = helper.old_search_payload_to_flat(s["filter"]) + else: + for r in rows: + r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) + r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"]) rows = 
helper.list_to_camel_case(rows) return rows @@ -235,7 +276,7 @@ def delete(project_id, metric_id, user_id): cur.execute( cur.mogrify("""\ UPDATE public.metrics - SET deleted_at = timezone('utc'::text, now()) + SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) WHERE project_id = %(project_id)s AND metric_id = %(metric_id)s AND (user_id = %(user_id)s OR is_public);""", @@ -256,6 +297,18 @@ def get(metric_id, project_id, user_id, flatten=True): WHERE metric_series.metric_id = metrics.metric_id AND metric_series.deleted_at ISNULL ) AS metric_series ON (TRUE) + LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards + FROM (SELECT dashboard_id, name, is_public + FROM dashboards + WHERE deleted_at ISNULL + AND project_id = %(project_id)s + AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards + ) AS connected_dashboards ON (TRUE) + LEFT JOIN LATERAL (SELECT email AS owner_email + FROM users + WHERE deleted_at ISNULL + AND users.user_id = metrics.user_id + ) AS owner ON (TRUE) WHERE metrics.project_id = %(project_id)s AND metrics.deleted_at ISNULL AND (metrics.user_id = %(user_id)s OR metrics.is_public) @@ -268,12 +321,46 @@ def get(metric_id, project_id, user_id, flatten=True): if row is None: return None row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"]) if flatten: for s in row["series"]: s["filter"] = helper.old_search_payload_to_flat(s["filter"]) return helper.dict_to_camel_case(row) +def get_with_template(metric_id, project_id, user_id, include_dashboard=True): + with pg_client.PostgresClient() as cur: + sub_query = "" + if include_dashboard: + sub_query = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards + FROM (SELECT dashboard_id, name, is_public + FROM dashboards + WHERE deleted_at ISNULL + AND project_id = %(project_id)s + AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards + ) AS connected_dashboards ON (TRUE)""" + cur.execute( + cur.mogrify( + f"""SELECT * + FROM metrics + LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series + FROM metric_series + WHERE metric_series.metric_id = metrics.metric_id + AND metric_series.deleted_at ISNULL + ) AS metric_series ON (TRUE) + {sub_query} + WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL) + AND metrics.deleted_at ISNULL + AND (metrics.user_id = %(user_id)s OR metrics.is_public) + AND metrics.metric_id = %(metric_id)s + ORDER BY created_at;""", + {"metric_id": metric_id, "project_id": project_id, "user_id": user_id} + ) + ) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + def get_series_for_alert(project_id, user_id): with pg_client.PostgresClient() as cur: cur.execute( diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py new file mode 100644 index 000000000..7b7bfe252 --- /dev/null +++ b/api/chalicelib/core/dashboards.py @@ -0,0 +1,319 @@ +import json + +import schemas +from chalicelib.core import custom_metrics, metrics +from chalicelib.utils import helper +from chalicelib.utils import pg_client +from chalicelib.utils.TimeUTC import TimeUTC + +CATEGORY_DESCRIPTION = { + 'overview': 'High-level metrics and web vitals.', + 'custom': 'Previously created custom metrics by me and my team.', + 'errors': 'Keep a closer eye on errors and track 
their type, origin and domain.', + 'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.', + 'resources': 'Find out which resources are missing and those that may be slowing your web app.' +} + + +def get_templates(project_id, user_id): + with pg_client.PostgresClient() as cur: + pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets + FROM (SELECT * , default_config AS config + FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series + FROM metric_series + WHERE metric_series.metric_id = metrics.metric_id + AND metric_series.deleted_at ISNULL + ) AS metric_series ON (TRUE) + WHERE deleted_at IS NULL + AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s))) + ) AS metrics + GROUP BY category + ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""", + {"project_id": project_id, "userId": user_id}) + cur.execute(pg_query) + rows = cur.fetchall() + for r in rows: + r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "") + for w in r["widgets"]: + w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) + w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) + return helper.list_to_camel_case(rows) + + +def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): + with pg_client.PostgresClient() as cur: + pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned) + VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s) + RETURNING *""" + params = {"userId": user_id, "projectId": project_id, **data.dict()} + if data.metrics is not None and len(data.metrics) > 0: + pg_query = f"""WITH dash AS ({pg_query}) + INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) + VALUES {",".join([f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])} + RETURNING (SELECT dashboard_id FROM dash)""" + for i, m in enumerate(data.metrics): + params[f"metric_id_{i}"] = m + # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \ + # .get("properties", {}).get("config", {}).get("default", {}) + # params[f"config_{i}"]["position"] = i + # params[f"config_{i}"] = json.dumps(params[f"config_{i}"]) + params[f"config_{i}"] = json.dumps({"position": i}) + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is None: + return {"errors": ["something went wrong while creating the dashboard"]} + return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])} + + +def get_dashboards(project_id, user_id): + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT * + FROM dashboards + WHERE deleted_at ISNULL + AND project_id = %(projectId)s + AND (user_id = %(userId)s OR is_public);""" + params = {"userId": user_id, "projectId": project_id} + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return helper.list_to_camel_case(rows) + + +def get_dashboard(project_id, user_id, dashboard_id): + with pg_client.PostgresClient() as cur: + pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets + FROM dashboards + LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets + FROM (SELECT 
dashboard_widgets.*, metrics.*, metric_series.series + FROM metrics + INNER JOIN dashboard_widgets USING (metric_id) + LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series + FROM metric_series + WHERE metric_series.metric_id = metrics.metric_id + AND metric_series.deleted_at ISNULL + ) AS metric_series ON (TRUE) + WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id + AND metrics.deleted_at ISNULL + AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics + ) AS all_metric_widgets ON (TRUE) + WHERE dashboards.deleted_at ISNULL + AND dashboards.project_id = %(projectId)s + AND dashboard_id = %(dashboard_id)s + AND (dashboards.user_id = %(userId)s OR is_public);""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) + for w in row["widgets"]: + w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"]) + w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"]) + for s in w["series"]: + s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"]) + return helper.dict_to_camel_case(row) + + +def delete_dashboard(project_id, user_id, dashboard_id): + with pg_client.PostgresClient() as cur: + pg_query = """UPDATE dashboards + SET deleted_at = timezone('utc'::text, now()) + WHERE dashboards.project_id = %(projectId)s + AND dashboard_id = %(dashboard_id)s + AND (dashboards.user_id = %(userId)s OR is_public);""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id} + cur.execute(cur.mogrify(pg_query, params)) + return {"data": {"success": True}} + + +def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema): + with pg_client.PostgresClient() as cur: + pg_query = """SELECT COALESCE(COUNT(*),0) AS count + FROM dashboard_widgets + WHERE dashboard_id = %(dashboard_id)s;""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + offset = row["count"] + pg_query = f"""UPDATE dashboards + SET name = %(name)s + {", is_public = %(is_public)s" if data.is_public is not None else ""} + {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""} + WHERE dashboards.project_id = %(projectId)s + AND dashboard_id = %(dashboard_id)s + AND (dashboards.user_id = %(userId)s OR is_public)""" + if data.metrics is not None and len(data.metrics) > 0: + pg_query = f"""WITH dash AS ({pg_query}) + INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) + VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};""" + for i, m in enumerate(data.metrics): + params[f"metric_id_{i}"] = m + # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \ + # .get("properties", {}).get("config", {}).get("default", {}) + # params[f"config_{i}"]["position"] = i + # params[f"config_{i}"] = json.dumps(params[f"config_{i}"]) + params[f"config_{i}"] = json.dumps({"position": i + offset}) + + cur.execute(cur.mogrify(pg_query, params)) + + return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id) + + +def get_widget(project_id, user_id, dashboard_id, widget_id): + with 
pg_client.PostgresClient() as cur: + pg_query = """SELECT metrics.*, metric_series.series + FROM dashboard_widgets + INNER JOIN dashboards USING (dashboard_id) + INNER JOIN metrics USING (metric_id) + LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series + FROM metric_series + WHERE metric_series.metric_id = metrics.metric_id + AND metric_series.deleted_at ISNULL + ) AS metric_series ON (TRUE) + WHERE dashboard_id = %(dashboard_id)s + AND widget_id = %(widget_id)s + AND (dashboards.is_public OR dashboards.user_id = %(userId)s) + AND dashboards.deleted_at IS NULL + AND metrics.deleted_at ISNULL + AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL) + AND (metrics.is_public OR metrics.user_id = %(userId)s);""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + +def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema): + with pg_client.PostgresClient() as cur: + pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) + SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id, + %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config + WHERE EXISTS(SELECT 1 FROM dashboards + WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s + AND dashboard_id = %(dashboard_id)s + AND (dashboards.user_id = %(userId)s OR is_public)) + RETURNING *;""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()} + params["config"] = json.dumps(data.config) + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + +def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema): + with pg_client.PostgresClient() as cur: + pg_query = """UPDATE dashboard_widgets + SET config= %(config)s + WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s + RETURNING *;""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, + "widget_id": widget_id, **data.dict()} + params["config"] = json.dumps(data.config) + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + +def remove_widget(project_id, user_id, dashboard_id, widget_id): + with pg_client.PostgresClient() as cur: + pg_query = """DELETE FROM dashboard_widgets + WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;""" + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id} + cur.execute(cur.mogrify(pg_query, params)) + return {"data": {"success": True}} + + +def pin_dashboard(project_id, user_id, dashboard_id): + with pg_client.PostgresClient() as cur: + pg_query = """UPDATE dashboards + SET is_pinned = FALSE + WHERE project_id=%(project_id)s; + UPDATE dashboards + SET is_pinned = True + WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL + RETURNING *;""" + params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + +def create_metric_add_widget(project_id, user_id, dashboard_id, data: 
schemas.CreateCustomMetricsSchema): + metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True) + return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id, + data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id)) + + +PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions, + schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, + schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time, + schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time, + schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start, + schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel, + schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages, + schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration, + schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time, + schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time, + schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint, + schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded, + schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit, + schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive, + schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests, + schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render, + schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption, + schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu, + schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps, + schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors, + schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx, + schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx, + schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains, + schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors, + schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type, + schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party, + schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location, + schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains, + schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser, + schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render, + schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages, + schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption, + schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu, + schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps, + schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes, + 
schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete, + schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time, + schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution, + schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend, + schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources, + schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time, + schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end, + schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type, + } + + +def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict): + return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data) + + +def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema): + raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id, + include_dashboard=False) + if raw_metric is None: + return None + metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric) + if metric.is_template: + return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict()) + else: + return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data, + metric=raw_metric) + + +def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema): + raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id) + if raw_metric is None: + return None + metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric) + if metric.is_template: + return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict()) + else: + return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"], + data=data, metric=raw_metric) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 933e3f800..272b86002 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -88,7 +88,7 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False): ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id})) rows += cur.fetchall() rows = helper.list_to_camel_case(rows) - rows = sorted(rows, key=lambda k: k["messageId"]) + rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"])) return rows @@ -339,9 +339,9 @@ def __generic_autocomplete(event: Event): class event_type: CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label") INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label") - LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="base_path") + LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path") CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name") - REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="url") + REQUEST = 
Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path") GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name") STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name") ERROR = Event(ui_type=schemas.EventType.error, table="events.errors", diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 1dc9e3347..16e95989d 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -261,7 +261,6 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None }} -@dev.timed def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): data.events = filter_stages(data.events) data.events = __fix_stages(data.events) @@ -313,7 +312,6 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): return f -@dev.timed def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None, start_date=None, end_date=None): if len(data.events) == 0: diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py index eacd6cd86..5aacb1375 100644 --- a/api/chalicelib/core/heatmaps.py +++ b/api/chalicelib/core/heatmaps.py @@ -3,7 +3,6 @@ from chalicelib.utils import helper, pg_client from chalicelib.utils import dev -@dev.timed def get_by_url(project_id, data): args = {"startDate": data.get('startDate', TimeUTC.now(delta_days=-30)), "endDate": data.get('endDate', TimeUTC.now()), diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py index 08adfd3ca..c04fd3981 100644 --- a/api/chalicelib/core/insights.py +++ b/api/chalicelib/core/insights.py @@ -1,11 +1,8 @@ import schemas -from chalicelib.core import sessions_metas +from chalicelib.core.metrics import __get_constraints, __get_constraint_values from chalicelib.utils import helper, dev from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.metrics_helper import __get_step_size -import math -from chalicelib.core.dashboard import __get_constraints, __get_constraint_values def __transform_journey(rows): @@ -24,14 +21,13 @@ def __transform_journey(rows): JOURNEY_DEPTH = 5 JOURNEY_TYPES = { - "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"}, + "PAGES": {"table": "events.pages", "column": "path", "table_id": "message_id"}, "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} } -@dev.timed def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", time_constraint=True) @@ -184,7 +180,6 @@ def __complete_acquisition(rows, start_date, end_date=None): return rows -@dev.timed def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -232,7 +227,6 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT } -@dev.timed def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), 
filters=[], **args): @@ -280,7 +274,6 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en } -@dev.timed def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -370,7 +363,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en } -@dev.timed + def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -463,7 +456,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), } -@dev.timed + def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -528,7 +521,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da return popularity -@dev.timed + def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -598,7 +591,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -658,7 +651,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -723,7 +716,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -760,7 +753,7 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en return rows -@dev.timed + def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): @@ -802,7 +795,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime return row_users -@dev.timed + def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) @@ -827,7 +820,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes return helper.dict_to_camel_case(row_users) -@dev.timed + def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", @@ -892,7 +885,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi } -@dev.timed + def search(text, feature_type, project_id, platform=None): if not feature_type: resource_type = "ALL" @@ -930,4 +923,4 @@ def search(text, feature_type, 
project_id, platform=None): rows = cur.fetchall() else: return [] - return [helper.dict_to_camel_case(row) for row in rows] \ No newline at end of file + return [helper.dict_to_camel_case(row) for row in rows] diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py index ea9c6c24e..7d8c956cf 100644 --- a/api/chalicelib/core/integration_jira_cloud.py +++ b/api/chalicelib/core/integration_jira_cloud.py @@ -15,10 +15,17 @@ class JIRAIntegration(integration_base.BaseIntegration): # TODO: enable super-constructor when OAuth is done # super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy) self._user_id = user_id - i = self.get() - if i is None: + self.integration = self.get() + if self.integration is None: return - self.issue_handler = JIRACloudIntegrationIssue(token=i["token"], username=i["username"], url=i["url"]) + self.integration["valid"] = True + try: + self.issue_handler = JIRACloudIntegrationIssue(token=self.integration["token"], + username=self.integration["username"], + url=self.integration["url"]) + except Exception as e: + self.issue_handler = None + self.integration["valid"] = False @property def provider(self): @@ -37,10 +44,10 @@ class JIRAIntegration(integration_base.BaseIntegration): return helper.dict_to_camel_case(cur.fetchone()) def get_obfuscated(self): - integration = self.get() - if integration is None: + if self.integration is None: return None - integration["token"] = obfuscate_string(integration["token"]) + integration = dict(self.integration) + integration["token"] = obfuscate_string(self.integration["token"]) integration["provider"] = self.provider.lower() return integration @@ -90,14 +97,13 @@ class JIRAIntegration(integration_base.BaseIntegration): return {"state": "success"} def add_edit(self, data): - s = self.get() - if s is not None: + if self.integration is not None: return self.update( changes={ "username": data["username"], "token": data["token"] \ if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \ - else s["token"], + else self.integration["token"], "url": data["url"] }, obfuscate=True diff --git a/api/chalicelib/core/integrations_manager.py b/api/chalicelib/core/integrations_manager.py index fca271870..ef63a7d96 100644 --- a/api/chalicelib/core/integrations_manager.py +++ b/api/chalicelib/core/integrations_manager.py @@ -36,7 +36,10 @@ def get_integration(tenant_id, user_id, tool=None): if tool not in SUPPORTED_TOOLS: return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED_TOOLS}"]}, None if tool == integration_jira_cloud.PROVIDER: - return None, integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id) + integration = integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id) + if integration.integration is not None and not integration.integration.get("valid", True): + return {"errors": ["JIRA: connexion issue/unauthorized"]}, integration + return None, integration elif tool == integration_github.PROVIDER: return None, integration_github.GitHubIntegration(tenant_id=tenant_id, user_id=user_id) return {"errors": ["lost integration"]}, None diff --git a/api/chalicelib/core/metadata.py b/api/chalicelib/core/metadata.py index 301503162..2eb62a6b4 100644 --- a/api/chalicelib/core/metadata.py +++ b/api/chalicelib/core/metadata.py @@ -273,7 +273,6 @@ def add_edit_delete(tenant_id, project_id, new_metas): return {"data": get(project_id)} -@dev.timed def 
get_remaining_metadata_with_count(tenant_id): all_projects = projects.get_projects(tenant_id=tenant_id) results = [] diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/metrics.py similarity index 72% rename from api/chalicelib/core/dashboard.py rename to api/chalicelib/core/metrics.py index 9cd88eb6a..fb8241440 100644 --- a/api/chalicelib/core/dashboard.py +++ b/api/chalicelib/core/metrics.py @@ -1,11 +1,12 @@ +import math + import schemas from chalicelib.core import metadata from chalicelib.utils import args_transformer -from chalicelib.utils import helper, dev +from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.metrics_helper import __get_step_size -import math # Written by David Aznaurov, inspired by numpy.quantile @@ -75,8 +76,6 @@ METADATA_FIELDS = {"userId": "user_id", "metadata9": "metadata_9", "metadata10": "metadata_10"} -from chalicelib.core import sessions_metas - def __get_meta_constraint(project_id, data): if len(data.get("filters", [])) == 0: @@ -127,7 +126,6 @@ SESSIONS_META_FIELDS = {"revId": "rev_id", "browser": "user_browser"} -@dev.timed def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -136,22 +134,21 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args) with pg_client.PostgresClient() as cur: - pg_query = f"""\ - SELECT generated_timestamp AS timestamp, - COALESCE(COUNT(sessions), 0) AS count - FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT 1 - FROM public.sessions - WHERE {" AND ".join(pg_sub_query_chart)} - ) AS sessions ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""" + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(sessions), 0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT 1 + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() results = { - "count": sum([r["count"] for r in rows]), + "value": sum([r["value"] for r in rows]), "chart": rows } @@ -159,8 +156,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) endTimestamp = startTimestamp startTimestamp = endTimestamp - diff - pg_query = f"""\ - SELECT COUNT(sessions.session_id) AS count + pg_query = f"""SELECT COUNT(sessions.session_id) AS count FROM public.sessions WHERE {" AND ".join(pg_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, @@ -170,12 +166,11 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) count = cur.fetchone()["count"] - results["countProgress"] = helper.__progress(old_val=count, new_val=results["count"]) - + results["progress"] = helper.__progress(old_val=count, new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.count return results -@dev.timed def get_errors(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -208,8 +203,8 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() results = { - "count": 0 if len(rows) == 0 else __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, - pg_sub_query_subset), + "count": 0 if len(rows) == 0 else \ + __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_sub_query_subset), "impactedSessions": sum([r["count"] for r in rows]), "chart": rows } @@ -234,7 +229,6 @@ def __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_su return cur.fetchone()["count"] -@dev.timed def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -298,7 +292,6 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return rows -@dev.timed def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): with pg_client.PostgresClient() as cur: @@ -316,7 +309,6 @@ def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return results -@dev.timed def __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) pg_sub_query.append("pages.timestamp>=%(startTimestamp)s") @@ -336,7 +328,6 @@ def __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args): return rows -@dev.timed def get_application_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): with pg_client.PostgresClient() as cur: @@ -359,10 +350,9 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, ** pg_sub_query.append("pages.timestamp > %(endTimestamp)s") pg_sub_query.append("pages.load_time > 0") pg_sub_query.append("pages.load_time IS NOT NULL") - pg_query = f"""\ - SELECT COALESCE(AVG(pages.load_time) ,0) AS avg_page_load_time - FROM events.pages INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)};""" + pg_query = f"""SELECT COALESCE(AVG(pages.load_time) ,0) AS avg_page_load_time + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} @@ -372,10 +362,9 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, ** pg_sub_query = __get_constraints(project_id=project_id, data=args) pg_sub_query.append("resources.duration > 0") pg_sub_query.append("resources.type= %(type)s") - pg_query = f"""\ - SELECT COALESCE(AVG(resources.duration),0) AS avg - FROM events.resources INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query)};""" + pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS avg + FROM events.resources INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)})) @@ -390,7 +379,6 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, ** return result -@dev.timed def 
get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): with pg_client.PostgresClient() as cur: @@ -409,12 +397,11 @@ def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) - - pg_query = f"""\ - SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages, - COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration - FROM public.sessions - WHERE {" AND ".join(pg_sub_query)};""" + pg_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)") + pg_query = f"""SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages, + COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration + FROM public.sessions + WHERE {" AND ".join(pg_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} @@ -423,7 +410,6 @@ def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args): return row -@dev.timed def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -460,16 +446,16 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), COALESCE(AVG(duration), 0) AS avg_duration FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT resources.duration - FROM events.resources - INNER JOIN public.sessions USING (session_id) + FROM events.resources INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query_chart)} ) AS sessions ON (TRUE) GROUP BY generated_timestamp ORDER BY generated_timestamp) AS chart ) AS chart ON (TRUE);""" - cur.execute(cur.mogrify(pg_query, {"step_size": step_size,"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)})) + cur.execute( + cur.mogrify(pg_query, {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)})) rows = cur.fetchall() for i in range(len(rows)): rows[i]["sessions"] = rows[i].pop("sessions_count") @@ -478,7 +464,6 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return sorted(rows, key=lambda k: k["sessions"], reverse=True) -@dev.timed def __get_performance_constraint(l): if len(l) == 0: return "" @@ -486,7 +471,6 @@ def __get_performance_constraint(l): return f"AND ({' OR '.join(l)})" -@dev.timed def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=19, resources=None, **args): step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) @@ -621,7 +605,6 @@ def __get_resource_db_type_from_type(resource_type): return {v: k for k, v in RESOURCS_TYPE_TO_DB_TYPE.items()}.get(resource_type, resource_type) -@dev.timed def search(text, resource_type, project_id, performance=False, pages_only=False, events_only=False, metadata=False, key=None, platform=None): if not resource_type: @@ -651,7 +634,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, WHERE {" AND ".join(pg_sub_query)} ORDER BY url, type ASC) AS ranked_values WHERE ranked_values.r<=5;""" - 
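The predicate added to __get_user_activity above pre-filters sessions that can contribute to neither average; AVG(NULLIF(col, 0)) then skips the zero values in the rows that remain. A rough Python model of that aggregation, illustrative only and not part of the patch:

def avg_ignoring_zeros(values):
    # AVG(NULLIF(v, 0)): zeros become NULL, so the aggregate skips them
    kept = [v for v in values if v != 0]
    # COALESCE(..., 0): an empty aggregate falls back to 0
    return sum(kept) / len(kept) if kept else 0

assert avg_ignoring_zeros([0, 10, 20, 0]) == 15
assert avg_ignoring_zeros([0, 0]) == 0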
print(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)})) rows = cur.fetchall() rows = [{"value": i["value"], "type": __get_resource_type_from_db_type(i["key"])} for i in rows] @@ -671,9 +653,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.pages INNER JOIN public.sessions USING(session_id) WHERE {" AND ".join(pg_sub_query)} AND positionUTF8(url_path, %(value)s) != 0 LIMIT 10);""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text.lower()), - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text.lower()), "platform_0": platform})) @@ -690,10 +669,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.resources INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text), - "resource_type": resource_type, - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "resource_type": resource_type, @@ -708,9 +683,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text), - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "platform_0": platform})) @@ -722,9 +694,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.inputs INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text), - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "platform_0": platform})) @@ -736,9 +705,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.clicks INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text), - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "platform_0": platform})) @@ -757,9 +723,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM sessions WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, - {"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key, - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key, "platform_0": platform})) @@ -784,10 +747,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, AND sessions.{SESSIONS_META_FIELDS[k]} ILIKE %(value)s LIMIT 10)""") pg_query = " UNION ALL ".join(pg_query) - print(cur.mogrify(pg_query, - {"project_id": project_id, "value": helper.string_to_sql_like(text), - "key": key, 
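These hunks strip the ad-hoc print(cur.mogrify(...)) calls that echoed every query built by search(). If query visibility is still wanted, a level-guarded logger is the usual substitute; this helper is only a suggestion, not something the patch adds:

import logging

logger = logging.getLogger(__name__)

def log_query(cur, pg_query, params):
    # mogrify only when DEBUG output is actually enabled, to skip the cost otherwise
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("SQL: %s", cur.mogrify(pg_query, params))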
- "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key, @@ -798,7 +757,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, return [helper.dict_to_camel_case(row) for row in rows] -@dev.timed def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -854,7 +812,6 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day return rows -@dev.timed def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -866,8 +823,6 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1), pg_sub_query_subset.append("resources.timestamp>=%(startTimestamp)s") pg_sub_query_subset.append("resources.timestamp<%(endTimestamp)s") - - with pg_client.PostgresClient() as cur: pg_query = f"""WITH resources AS (SELECT resources.session_id, resources.url_hostpath, @@ -922,7 +877,6 @@ def dashboard_args(params): return args -@dev.timed def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=19, type=None, url=None, **args): @@ -971,7 +925,6 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days return {"avg": avg, "chart": rows} -@dev.timed def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=19, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -993,13 +946,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM public.sessions INNER JOIN events.pages USING (session_id) WHERE {" AND ".join(pg_sub_query_subset)}) - SELECT COALESCE(avg, 0) AS avg, chart + SELECT COALESCE(avg, 0) AS value, chart FROM (SELECT AVG(dom_building_time) FROM pages) AS avg LEFT JOIN (SELECT jsonb_agg(chart) AS chart FROM ( SELECT generated_timestamp AS timestamp, - COALESCE(AVG(dom_building_time), 0) AS avg + COALESCE(AVG(dom_building_time), 0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT pages.dom_building_time FROM pages @@ -1014,10 +967,10 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() + row["unit"] = schemas.TemplatePredefinedUnits.millisecond return row -@dev.timed def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), type="all", density=19, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1091,7 +1044,6 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return rows -@dev.timed def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1110,7 +1062,6 @@ def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return {"count": sum(i["count"] for i in rows), "chart": helper.list_to_camel_case(rows)} -@dev.timed def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1139,7 +1090,6 @@ def 
get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- return {"avg": avg, "chart": helper.list_to_camel_case(rows)} -@dev.timed def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1155,7 +1105,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 pg_sub_query_chart.append(f"url = %(value)s") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(AVG(pages.response_time),0) AS avg + COALESCE(AVG(pages.response_time),0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT response_time @@ -1176,10 +1126,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - return {"avg": avg, "chart": rows} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} -@dev.timed def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=20, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1297,7 +1246,6 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( return result -@dev.timed def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1317,7 +1265,6 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1 return rows -@dev.timed def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), value=None, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1368,7 +1315,6 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return helper.dict_to_camel_case(row) -@dev.timed def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, url=None, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1384,11 +1330,11 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), pg_query = f"""WITH pages AS(SELECT pages.visually_complete,pages.timestamp FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query_subset)}) - SELECT COALESCE((SELECT AVG(pages.visually_complete) FROM pages),0) AS avg, + SELECT COALESCE((SELECT AVG(pages.visually_complete) FROM pages),0) AS value, jsonb_agg(chart) AS chart FROM (SELECT generated_timestamp AS timestamp, - COALESCE(AVG(visually_complete), 0) AS avg + COALESCE(AVG(visually_complete), 0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT pages.visually_complete FROM pages @@ -1402,10 +1348,10 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() + row["unit"] = schemas.TemplatePredefinedUnits.millisecond return row -@dev.timed def get_impacted_sessions_by_slow_pages(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), value=None, density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1444,7 +1390,6 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d return rows -@dev.timed def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1454,7 +1399,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(AVG(performance.avg_used_js_heap_size),0) AS avg_used_js_heap_size + COALESCE(AVG(performance.avg_used_js_heap_size),0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT avg_used_js_heap_size @@ -1474,10 +1419,9 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - return {"avgUsedJsHeapSize": avg, "chart": helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.memory} -@dev.timed def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1487,7 +1431,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(AVG(performance.avg_cpu),0) AS avg_cpu + COALESCE(AVG(performance.avg_cpu),0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT avg_cpu @@ -1507,20 +1451,21 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - return {"avgCpu": avg, "chart": helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), + "unit": schemas.TemplatePredefinedUnits.percentage} -@dev.timed def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) pg_sub_query = __get_constraints(project_id=project_id, data=args) pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args) - + pg_sub_query.append("performance.avg_fps>0") + pg_sub_query_chart.append("performance.avg_fps>0") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT generated_timestamp AS timestamp, - COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS avg_fps + COALESCE(AVG(performance.avg_fps),0) AS value FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp LEFT JOIN LATERAL ( SELECT avg_fps @@ -1535,15 +1480,14 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "endTimestamp": endTimestamp, **__get_constraint_values(args)} cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() - pg_query = f"""SELECT COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS avg + 
pg_query = f"""SELECT COALESCE(AVG(performance.avg_fps),0) AS avg FROM events.performance INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)};""" cur.execute(cur.mogrify(pg_query, params)) avg = cur.fetchone()["avg"] - return {"avgFps": avg, "chart": helper.list_to_camel_case(rows)} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.frame} -@dev.timed def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1617,7 +1561,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), def __get_neutral(rows, add_All_if_empty=True): neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]} - if add_All_if_empty and len(neutral.keys()) == 0: + if add_All_if_empty and len(neutral.keys()) <= 1: neutral = {"All": 0} return neutral @@ -1628,7 +1572,6 @@ def __merge_rows_with_neutral(rows, neutral): return rows -@dev.timed def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=6, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1679,7 +1622,6 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return result -@dev.timed def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=6, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1720,7 +1662,6 @@ def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1) return rows -@dev.timed def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=6, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1769,7 +1710,6 @@ def __nested_array_to_dict_array(rows, key="url_host", value="count"): return rows -@dev.timed def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1801,7 +1741,6 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return {"avg": avg, "partition": rows} -@dev.timed def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1815,7 +1754,7 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1) WHERE {" AND ".join(pg_sub_query)} GROUP BY resources.url_host ORDER BY errors_count DESC - LIMIT 10;""" + LIMIT 5;""" cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)})) @@ -1823,7 +1762,6 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1) return helper.list_to_camel_case(rows) -@dev.timed def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1866,7 +1804,6 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=- return {"count": sum(i["count"] for i in rows), "chart": rows} -@dev.timed def get_calls_errors(project_id, 
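The loosened __get_neutral guard above now also collapses a single-key result into {"All": 0}, not only an empty one. Worked through on a toy input (the values are invented, and the key collection is simplified from the nested comprehension in the source):

rows = [{"Chrome": 3}, {"Chrome": 5}]        # only one distinct key across all rows
neutral = {k: 0 for r in rows for k in r}    # {"Chrome": 0}, so len(neutral) == 1
if len(neutral) <= 1:                        # the old "== 0" check missed this case
    neutral = {"All": 0}
assert neutral == {"All": 0}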
startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1877,7 +1814,7 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT with pg_client.PostgresClient() as cur: pg_query = f"""SELECT resources.method, resources.url_hostpath, - COUNT(resources.session_id) AS all_requests, + COUNT(resources.session_id) AS all_requests, SUM(CASE WHEN resources.status/100 = 4 THEN 1 ELSE 0 END) AS _4xx, SUM(CASE WHEN resources.status/100 = 5 THEN 1 ELSE 0 END) AS _5xx FROM events.resources INNER JOIN sessions USING (session_id) @@ -1892,7 +1829,6 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT return helper.list_to_camel_case(rows) -@dev.timed def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1903,7 +1839,7 @@ def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT resources.method, resources.url_hostpath, - COUNT(resources.session_id) AS all_requests + COUNT(resources.session_id) AS all_requests FROM events.resources INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY resources.method, resources.url_hostpath @@ -1916,7 +1852,6 @@ def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return helper.list_to_camel_case(rows) -@dev.timed def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, **args): pg_sub_query = __get_constraints(project_id=project_id, data=args) @@ -1927,7 +1862,7 @@ def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT resources.method, resources.url_hostpath, - COUNT(resources.session_id) AS all_requests + COUNT(resources.session_id) AS all_requests FROM events.resources INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} GROUP BY resources.method, resources.url_hostpath @@ -1940,7 +1875,6 @@ def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return helper.list_to_camel_case(rows) -@dev.timed def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), platform=None, density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -1952,7 +1886,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e pg_sub_query_subset.append("resources.status > 200") pg_sub_query_subset_e = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors", - time_constraint=False) + time_constraint=False) pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True, data=args, main_table="", time_column="timestamp", project=False, duration=False) @@ -1977,7 +1911,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e WHERE {" AND ".join(pg_sub_query_subset_e)} AND source = 'js_exception' ) - SELECT generated_timestamp AS timestamp, + SELECT generated_timestamp AS timestamp, COALESCE(SUM(CASE WHEN status / 100 = 4 THEN 1 ELSE 0 END), 0) AS _4xx, COALESCE(SUM(CASE WHEN status / 100 = 5 THEN 1 ELSE 0 END), 0) AS _5xx, COALESCE((SELECT COUNT(*) 
@@ -2005,7 +1939,6 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e return rows -@dev.timed def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -2060,7 +1993,6 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d return helper.list_to_camel_case(__merge_charts(response_end, actions)) -@dev.timed def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -2142,7 +2074,6 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de return {**row_sessions, **row_errors, "chart": chart} -@dev.timed def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -2193,7 +2124,6 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de return helper.list_to_camel_case(rows) -@dev.timed def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -2230,7 +2160,6 @@ def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_day return rows -@dev.timed def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) @@ -2284,3 +2213,734 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1) rows = cur.fetchall() return rows + + +def __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query.append("resources.duration > 0") + pg_sub_query.append("resources.type= %(type)s") + pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value + FROM events.resources INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)};""" + + cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)})) + row = cur.fetchone() + return row + + +def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with pg_client.PostgresClient() as cur: + row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) + results = row + results["chart"] = get_performance_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args) + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def 
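Each new get_application_activity_* wrapper (and the get_page_metrics_*/get_user_activity_* ones further down) derives its "progress" figure by re-running the aggregate over the window of equal length immediately preceding the current one, then handing both values to helper.__progress. The shift, with concrete invented numbers:

startTimestamp, endTimestamp = 1_000_000, 1_600_000  # current window, in ms
diff = endTimestamp - startTimestamp                 # window width: 600_000 ms
endTimestamp = startTimestamp                        # previous window ends where
startTimestamp = endTimestamp - diff                 # the current one begins
assert (startTimestamp, endTimestamp) == (400_000, 1_000_000)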
get_performance_avg_image_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                        endTimestamp=TimeUTC.now(),
+                                        density=19, **args):
+    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
+    img_constraints = []
+
+    img_constraints_vals = {}
+
+    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+              "endTimestamp": endTimestamp}
+    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
+                                            chart=False, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
+                                           chart=True, data=args, main_table="resources", time_column="timestamp",
+                                           duration=False)
+    pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
+    pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
+
+    pg_query = f"""WITH resources AS (SELECT resources.duration, resources.timestamp
+                        FROM events.resources INNER JOIN public.sessions USING (session_id)
+                        WHERE {" AND ".join(pg_sub_query_subset)}
+                          AND resources.type = 'img' AND resources.duration>0
+                          {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
+                    )
+                    SELECT generated_timestamp AS timestamp,
+                           COALESCE(AVG(resources.duration),0) AS value
+                    FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                        LEFT JOIN LATERAL (
+                            SELECT resources.duration
+                            FROM resources
+                            WHERE {" AND ".join(pg_sub_query_chart)}
+                        ) AS resources ON (TRUE)
+                    GROUP BY timestamp
+                    ORDER BY timestamp;"""
+    cur.execute(cur.mogrify(pg_query, {**params, **img_constraints_vals, **__get_constraint_values(args)}))
+    rows = cur.fetchall()
+    rows = helper.list_to_camel_case(rows)
+
+    return rows
+
+
+def __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query.append("pages.timestamp >= %(startTimestamp)s")
+    pg_sub_query.append("pages.timestamp < %(endTimestamp)s")
+    pg_sub_query.append("pages.load_time > 0")
+    pg_sub_query.append("pages.load_time IS NOT NULL")
+    pg_query = f"""SELECT COALESCE(AVG(pages.load_time) ,0) AS value
+                   FROM events.pages INNER JOIN public.sessions USING (session_id)
+                   WHERE {" AND ".join(pg_sub_query)};"""
+    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+              **__get_constraint_values(args)}
+
+    cur.execute(cur.mogrify(pg_query, params))
+    row = cur.fetchone()
+    row["unit"] = schemas.TemplatePredefinedUnits.millisecond
+    return row
+
+
+def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                                endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        results = row
+        results["chart"] = get_performance_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        previous = helper.dict_to_camel_case(row)
+        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+        results["unit"] = schemas.TemplatePredefinedUnits.millisecond
+        return results
+
+
+def get_performance_avg_page_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                       endTimestamp=TimeUTC.now(),
+                                       density=19, **args):
+    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
+    location_constraints = []
+    location_constraints_vals = {}
+    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+              "endTimestamp": endTimestamp}
+    pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
+                                            chart=False, data=args)
+    pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
+                                           chart=True, data=args, main_table="pages", time_column="timestamp",
+                                           duration=False)
+    pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
+    pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
+    pg_query = f"""WITH pages AS(SELECT pages.load_time, timestamp
+                        FROM events.pages INNER JOIN public.sessions USING (session_id)
+                        WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
+                          {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+                    )
+                    SELECT generated_timestamp AS timestamp,
+                           COALESCE(AVG(pages.load_time),0) AS value
+                    FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
+                        LEFT JOIN LATERAL ( SELECT pages.load_time
+                                            FROM pages
+                                            WHERE {" AND ".join(pg_sub_query_chart)}
+                                            {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+                        ) AS pages ON (TRUE)
+                    GROUP BY generated_timestamp
+                    ORDER BY generated_timestamp;"""
+    cur.execute(cur.mogrify(pg_query, {**params, **location_constraints_vals, **__get_constraint_values(args)}))
+    rows = cur.fetchall()
+    return rows
+
+
+def __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args):
+    pg_sub_query = __get_constraints(project_id=project_id, data=args)
+    pg_sub_query.append("resources.duration > 0")
+    pg_sub_query.append("resources.type= %(type)s")
+    pg_query = f"""SELECT COALESCE(AVG(resources.duration),0) AS value
+                   FROM events.resources INNER JOIN public.sessions USING (session_id)
+                   WHERE {" AND ".join(pg_sub_query)};"""
+
+    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp,
+                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
+
+    row = cur.fetchone()
+    row["unit"] = schemas.TemplatePredefinedUnits.millisecond
+    return row
+
+
+def get_application_activity_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+                                                   endTimestamp=TimeUTC.now(), **args):
+    with pg_client.PostgresClient() as cur:
+        row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        results = row
+        results["chart"] = get_performance_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        diff = endTimestamp - startTimestamp
+        endTimestamp = startTimestamp
+        startTimestamp = endTimestamp - diff
+        row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
+        previous = helper.dict_to_camel_case(row)
+        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+
results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def get_performance_avg_request_load_time(cur, project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), + density=19, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + request_constraints = [] + request_constraints_vals = {} + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + + pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, + chart=False, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + chart=True, data=args, main_table="resources", time_column="timestamp", + duration=False) + pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s") + pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s") + + pg_query = f"""WITH resources AS(SELECT resources.duration, resources.timestamp + FROM events.resources INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_subset)} + AND resources.type = 'fetch' AND resources.duration>0 + {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""} + ) + SELECT generated_timestamp AS timestamp, + COALESCE(AVG(resources.duration),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT resources.duration + FROM resources + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS resources ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **request_constraints_vals, **__get_constraint_values(args)})) + rows = cur.fetchall() + + return rows + + +def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with pg_client.PostgresClient() as cur: + row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = __get_page_metrics_avg_dom_content_load_start_chart(cur, project_id, startTimestamp, + endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args) + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query.append("pages.timestamp>=%(startTimestamp)s") + pg_sub_query.append("pages.timestamp<%(endTimestamp)s") + pg_sub_query.append("pages.dom_content_loaded_time > 0") + pg_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_time), 0) AS value + FROM (SELECT pages.dom_content_loaded_time + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + ) AS pages;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + 
cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return row + + +def __get_page_metrics_avg_dom_content_load_start_chart(cur, project_id, startTimestamp, endTimestamp, density=19, + **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, + chart=False, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + chart=True, data=args, main_table="pages", time_column="timestamp", + duration=False) + pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s") + pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s") + pg_sub_query_subset.append("pages.dom_content_loaded_time > 0") + + pg_query = f"""WITH pages AS(SELECT pages.dom_content_loaded_time, pages.timestamp + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_subset)} + ) + SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.dom_content_loaded_time),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT pages.dom_content_loaded_time + FROM pages + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) + rows = cur.fetchall() + return rows + + +def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with pg_client.PostgresClient() as cur: + rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + results = helper.dict_to_camel_case(rows[0]) + results["chart"] = __get_page_metrics_avg_first_contentful_pixel_chart(cur, project_id, startTimestamp, + endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + previous = helper.dict_to_camel_case(rows[0]) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query.append("pages.timestamp>=%(startTimestamp)s") + pg_sub_query.append("pages.timestamp<%(endTimestamp)s") + pg_sub_query.append("pages.first_contentful_paint_time > 0") + pg_query = f"""SELECT COALESCE(AVG(pages.first_contentful_paint_time), 0) AS value + FROM (SELECT pages.first_contentful_paint_time + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + ) AS pages;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return rows + + +def __get_page_metrics_avg_first_contentful_pixel_chart(cur, 
project_id, startTimestamp, endTimestamp, density=20, + **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, + chart=False, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + chart=True, data=args, main_table="pages", time_column="timestamp", + duration=False) + pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s") + pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s") + pg_sub_query_subset.append("pages.first_contentful_paint_time > 0") + + pg_query = f"""WITH pages AS(SELECT pages.first_contentful_paint_time, pages.timestamp + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_subset)} + ) + SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.first_contentful_paint_time),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT pages.first_contentful_paint_time + FROM pages + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) + rows = cur.fetchall() + return rows + + +def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with pg_client.PostgresClient() as cur: + row = __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp, + endTimestamp, **args) + + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args) + + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.count + return results + + +def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query.append("sessions.pages_count>0") + pg_query = f"""SELECT COALESCE(CEIL(AVG(sessions.pages_count)),0) AS value + FROM public.sessions + WHERE {" AND ".join(pg_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return row + + +def __get_user_activity_avg_visited_pages_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, + chart=False, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, 
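All of these *_chart helpers share one query shape: generate_series() emits a row per time bucket, and a LEFT JOIN LATERAL pulls in the matching events so empty buckets still come back with value 0. A rough Python model of that zero-filled bucketing, illustrative only (note generate_series is end-inclusive while range() is not):

def bucketize(events, start, end, step):
    # events: iterable of (timestamp_ms, value) pairs
    chart = []
    for t in range(start, end, step):  # plays the role of generate_series
        vals = [v for ts, v in events if t <= ts < t + step]  # the LATERAL subquery
        chart.append({"timestamp": t,
                      "value": sum(vals) / len(vals) if vals else 0})  # COALESCE(AVG(...), 0)
    return chart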
time_constraint=False, project=False, + chart=True, data=args, main_table="sessions", time_column="start_ts", + duration=False) + pg_sub_query_subset.append("sessions.duration IS NOT NULL") + + pg_query = f"""WITH sessions AS(SELECT sessions.pages_count, sessions.start_ts + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_subset)} + ) + SELECT generated_timestamp AS timestamp, + COALESCE(AVG(sessions.pages_count),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT sessions.pages_count + FROM sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) + rows = cur.fetchall() + return rows + + +def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with pg_client.PostgresClient() as cur: + row = __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = __get_user_activity_avg_session_duration_chart(cur, project_id, startTimestamp, + endTimestamp, **args) + + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args) + + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query.append("sessions.duration IS NOT NULL") + pg_sub_query.append("sessions.duration > 0") + pg_query = f"""SELECT COALESCE(AVG(sessions.duration),0) AS value + FROM public.sessions + WHERE {" AND ".join(pg_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + return row + + +def __get_user_activity_avg_session_duration_chart(cur, project_id, startTimestamp, endTimestamp, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + pg_sub_query_subset = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + chart=True, data=args, main_table="sessions", time_column="start_ts", + duration=False) + pg_sub_query_subset.append("sessions.duration IS NOT NULL") + pg_sub_query_subset.append("sessions.duration > 0") + + pg_query = f"""WITH sessions AS(SELECT sessions.duration, sessions.start_ts + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_subset)} + ) + SELECT generated_timestamp AS timestamp, + COALESCE(AVG(sessions.duration),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT sessions.duration + FROM sessions + WHERE {" AND 
".join(pg_sub_query_chart)} + ) AS sessions ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) + rows = cur.fetchall() + return rows + + +def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COALESCE(AVG(pages.response_time), 0) AS value + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND pages.timestamp >= %(startTimestamp)s + AND pages.timestamp < %(endTimestamp)s + AND pages.response_time > 0;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.response_time),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT response_time + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} AND pages.response_time > 0 + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp ASC;""" + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + row["chart"] = helper.list_to_camel_case(rows) + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(row) + + +def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COALESCE(AVG(pages.first_paint_time), 0) AS value + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND pages.timestamp >= %(startTimestamp)s + AND pages.timestamp < %(endTimestamp)s + AND pages.first_paint_time > 0;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.first_paint_time),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT first_paint_time + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND 
".join(pg_sub_query_chart)} AND pages.first_paint_time > 0 + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp ASC;""" + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + row["chart"] = helper.list_to_camel_case(rows) + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(row) + + +def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=19, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + pg_sub_query.append("pages.dom_content_loaded_time>0") + pg_sub_query_chart.append("pages.dom_content_loaded_time>0") + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_time), 0) AS value + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND pages.timestamp >= %(startTimestamp)s + AND pages.timestamp < %(endTimestamp)s + AND pages.dom_content_loaded_time > 0;""" + params = {"step_size": step_size, + "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.dom_content_loaded_time),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT dom_content_loaded_time + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp ASC;""" + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + row["chart"] = helper.list_to_camel_case(rows) + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(row) + + +def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COALESCE(AVG(pages.ttfb), 0) AS value + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND pages.timestamp >= %(startTimestamp)s + AND pages.timestamp < %(endTimestamp)s + AND pages.ttfb > 0;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.ttfb),0) AS value 
+ FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT ttfb + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} AND pages.ttfb > 0 + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp ASC;""" + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + row["chart"] = helper.list_to_camel_case(rows) + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(row) + + +def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1) + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + pg_sub_query.append("pages.time_to_interactive > 0") + pg_sub_query_chart.append("pages.time_to_interactive > 0") + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive), 0) AS value + FROM events.pages + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND pages.timestamp >= %(startTimestamp)s + AND pages.timestamp < %(endTimestamp)s;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + pg_query = f"""SELECT generated_timestamp AS timestamp, + COALESCE(AVG(pages.time_to_interactive),0) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT time_to_interactive + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp ASC;""" + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + row["chart"] = helper.list_to_camel_case(rows) + row["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(row) + + +def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1) + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + pg_sub_query = __get_constraints(project_id=project_id, data=args) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False, + chart=True, data=args, main_table="pages", time_column="timestamp", + duration=False) + + if value is not None: + pg_sub_query.append("pages.path = %(value)s") + pg_sub_query_chart.append("pages.path = %(value)s") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(pages.session_id) AS value + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)};""" + cur.execute(cur.mogrify(pg_query, {"project_id": project_id, + 
"startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)})) + row = cur.fetchone() + pg_query = f"""WITH pages AS(SELECT pages.timestamp + FROM events.pages INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + ) + SELECT generated_timestamp AS timestamp, + COUNT(pages.*) AS value + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( + SELECT 1 + FROM pages + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS pages ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) + rows = cur.fetchall() + row["chart"] = rows + row["unit"] = schemas.TemplatePredefinedUnits.count + return helper.dict_to_camel_case(row) diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index c5ae912aa..0b0bd963f 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -41,78 +41,53 @@ def __create(tenant_id, name): return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True) -@dev.timed -def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False, - last_tracker_version=None): +def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False): with pg_client.PostgresClient() as cur: - tracker_query = "" - if last_tracker_version is not None and len(last_tracker_version) > 0: - tracker_query = cur.mogrify( - """,(SELECT tracker_version FROM public.sessions - WHERE sessions.project_id = s.project_id - AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""", - {"version": last_tracker_version}).decode('UTF-8') - elif version: - tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version" - cur.execute(f"""\ SELECT - s.project_id, s.name, s.project_key + s.project_id, s.name, s.project_key, s.save_request_payloads {',s.gdpr' if gdpr else ''} {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''} {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''} - {tracker_query} FROM public.projects AS s {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''} WHERE s.deleted_at IS NULL - ORDER BY s.project_id;""" - ) + ORDER BY s.project_id;""") rows = cur.fetchall() if recording_state: project_ids = [f'({r["project_id"]})' for r in rows] - query = f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last - FROM (VALUES {",".join(project_ids)}) AS projects(project_id) - LEFT JOIN sessions USING (project_id) - GROUP BY project_id;""" - cur.execute( - query=query - ) + query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last + FROM (VALUES {",".join(project_ids)}) AS projects(project_id) + LEFT JOIN sessions USING (project_id) + WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s + GROUP BY project_id;""", + {"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)}) + + cur.execute(query=query) status = cur.fetchall() for r in rows: + r["status"] = "red" for 
diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py index c5ae912aa..0b0bd963f 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -41,78 +41,53 @@ def __create(tenant_id, name): return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True) -@dev.timed -def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False, - last_tracker_version=None): +def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False): with pg_client.PostgresClient() as cur: - tracker_query = "" - if last_tracker_version is not None and len(last_tracker_version) > 0: - tracker_query = cur.mogrify( - """,(SELECT tracker_version FROM public.sessions - WHERE sessions.project_id = s.project_id - AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""", - {"version": last_tracker_version}).decode('UTF-8') - elif version: - tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version" - cur.execute(f"""\ SELECT - s.project_id, s.name, s.project_key + s.project_id, s.name, s.project_key, s.save_request_payloads {',s.gdpr' if gdpr else ''} {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''} {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''} - {tracker_query} FROM public.projects AS s {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''} WHERE s.deleted_at IS NULL - ORDER BY s.project_id;""" - ) + ORDER BY s.project_id;""") rows = cur.fetchall() if recording_state: project_ids = [f'({r["project_id"]})' for r in rows] - query = f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last - FROM (VALUES {",".join(project_ids)}) AS projects(project_id) - LEFT JOIN sessions USING (project_id) - GROUP BY project_id;""" - cur.execute( - query=query - ) + query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last + FROM (VALUES {",".join(project_ids)}) AS projects(project_id) + LEFT JOIN sessions USING (project_id) + WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s + GROUP BY project_id;""", + {"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)}) + + cur.execute(query=query) status = cur.fetchall() for r in rows: + r["status"] = "red" for s in status: if s["project_id"] == r["project_id"]: - if s["last"] < TimeUTC.now(-2): - r["status"] = "red" - elif s["last"] < TimeUTC.now(-1): + if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1): r["status"] = "yellow" - else: + elif s["last"] >= TimeUTC.now(-1): r["status"] = "green" break return helper.list_to_camel_case(rows) -def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None, version=False, - last_tracker_version=None): +def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None): with pg_client.PostgresClient() as cur: - tracker_query = "" - if last_tracker_version is not None and len(last_tracker_version) > 0: - tracker_query = cur.mogrify( - """,(SELECT tracker_version FROM public.sessions - WHERE sessions.project_id = s.project_id - AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""", - {"version": last_tracker_version}).decode('UTF-8') - elif version: - tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version" - query = cur.mogrify(f"""\ SELECT s.project_id, s.project_key, - s.name + s.name, + s.save_request_payloads {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""} {',s.gdpr' if include_gdpr else ''} - {tracker_query} FROM public.projects AS s where s.project_id =%(project_id)s AND s.deleted_at IS NULL @@ -244,7 +219,8 @@ def get_project_key(project_id): where project_id =%(project_id)s AND deleted_at ISNULL;""", {"project_id": project_id}) ) - return cur.fetchone()["project_key"] + project = cur.fetchone() + return project["project_key"] if project is not None else None def get_capture_status(project_id): @@ -280,3 +256,13 @@ def update_capture_status(project_id, changes): ) return changes + + +def get_projects_ids(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute(f"""SELECT s.project_id + FROM public.projects AS s + WHERE s.deleted_at IS NULL + ORDER BY s.project_id;""") + rows = cur.fetchall() + return [r["project_id"] for r in rows]
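The reworked recording_state block now defaults every project to "red" and only upgrades it when a recent session exists, which also covers projects that the new three-day windowed status query no longer returns at all. A pure-Python sketch of the same thresholds, assuming TimeUTC.now(n) means "now shifted by n days, in epoch milliseconds" (the constants below are illustrative stand-ins):

import time

DAY_MS = 24 * 60 * 60 * 1000

def recording_status(last_session_ts, now_ms=None):
    # Mirrors the loop above: red by default, yellow if the last session is
    # between one and two days old, green if there is one within the last day.
    now_ms = int(time.time() * 1000) if now_ms is None else now_ms
    if last_session_ts >= now_ms - DAY_MS:
        return "green"
    if last_session_ts >= now_ms - 2 * DAY_MS:
        return "yellow"
    return "red"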
diff --git a/api/chalicelib/core/resources.py b/api/chalicelib/core/resources.py index 6a7e395f8..d85e56b6f 100644 --- a/api/chalicelib/core/resources.py +++ b/api/chalicelib/core/resources.py @@ -1,23 +1,23 @@ from chalicelib.utils import helper, pg_client -def get_by_session_id(session_id): +def get_by_session_id(session_id, project_id): with pg_client.PostgresClient() as cur: ch_query = """\ SELECT timestamp AS datetime, url, type, - duration, + resources.duration AS duration, ttfb, header_size, encoded_body_size, decoded_body_size, success, COALESCE(status, CASE WHEN success THEN 200 END) AS status - FROM events.resources - WHERE session_id = %(session_id)s;""" - params = {"session_id": session_id} + FROM events.resources INNER JOIN sessions USING (session_id) + WHERE session_id = %(session_id)s AND project_id= %(project_id)s;""" + params = {"session_id": session_id, "project_id": project_id} cur.execute(cur.mogrify(ch_query, params)) rows = cur.fetchall() return helper.list_to_camel_case(rows) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 1903cc08b..adc549d1e 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -3,7 +3,7 @@ from typing import List import schemas from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist, performance_event -from chalicelib.utils import pg_client, helper, dev, metrics_helper +from chalicelib.utils import pg_client, helper, metrics_helper SESSION_PROJECTION_COLS = """s.project_id, s.session_id::text AS session_id, @@ -39,7 +39,8 @@ def __group_metadata(session, project_metadata): return meta -def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False): +def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False, + live=True): with pg_client.PostgresClient() as cur: extra_query = [] if include_fav_viewed: @@ -93,13 +94,13 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['resources'] = resources.get_by_session_id(session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id) - data['live'] = assist.is_live(project_id=project_id, - session_id=session_id, - project_key=data["projectKey"]) + data['live'] = live and assist.is_live(project_id=project_id, + session_id=session_id, + project_key=data["projectKey"]) data["inDB"] = True return data else: @@ -167,7 +168,6 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): return op in [schemas.SearchEventOperator._is_undefined] -@dev.timed def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None): full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, @@ -233,20 +233,19 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e data.order = "DESC" sort = 'session_id' if data.sort is not None and data.sort != "session_id": - sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - else: - sort = 'session_id' + # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) + sort = helper.key_to_snake_case(data.sort) meta_keys = metadata.get(project_id=project_id) main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""", + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", full_args) # print("--------------------") # print(main_query) @@ -280,9 +279,9 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e for i, s in enumerate(sessions): sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ if sessions[i][f'metadata_{k["index"]}'] is not None} - if not
data.group_by_user and data.sort is not None and data.sort != "session_id": - sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], - reverse=data.order.upper() == "DESC") + # if not data.group_by_user and data.sort is not None and data.sort != "session_id": + # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], + # reverse=data.order.upper() == "DESC") return { 'total': total, 'sessions': helper.list_to_camel_case(sessions) @@ -354,8 +353,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d full_args[arg_name] = metric_value[i] extra_where = f"WHERE ({' OR '.join(extra_where)})" elif metric_of == schemas.TableMetricOfType.visited_url: - main_col = "base_path" - extra_col = ", base_path" + main_col = "path" + extra_col = ", path" main_query = cur.mogrify(f"""{pre_query} SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values FROM (SELECT {main_col} AS name, @@ -659,11 +658,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr **_multiple_values(event.value, value_key=e_k), **_multiple_values(event.source, value_key=s_k)} - # if event_type not in list(events.SUPPORTED_TYPES.keys()) \ - # or event.value in [None, "", "*"] \ - # and (event_type != events.event_type.ERROR.ui_type \ - # or event_type != events.event_type.ERROR_IOS.ui_type): - # continue if event_type == events.event_type.CLICK.ui_type: event_from = event_from % f"{events.event_type.CLICK.table} AS main " if not is_any: diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 2e698dcfd..a868ef2d3 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -24,7 +24,7 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} -@dev.timed + def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: """ Add minimal timestamp @@ -293,7 +293,7 @@ def pearson_corr(x: list, y: list): return r, confidence, False -@dev.timed + def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage): """ Returns two lists with binary values 0/1: @@ -363,7 +363,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ return transitions, errors, all_errors, n_sess_affected -@dev.timed + def get_affected_users_for_all_issues(rows, first_stage, last_stage): """ @@ -415,7 +415,7 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts -@dev.timed + def count_sessions(rows, n_stages): session_counts = {i: set() for i in range(1, n_stages + 1)} for ind, row in enumerate(rows): @@ -467,7 +467,7 @@ def get_stages(stages, rows): return stages_list -@dev.timed + def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False): """ @@ -544,7 +544,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -@dev.timed + def get_top_insights(filter_d, project_id): output = [] stages = filter_d.get("events", []) @@ -582,7 +582,7 @@ def get_top_insights(filter_d, project_id): return stages_list, total_drop_due_to_issues -@dev.timed + def get_issues_list(filter_d, project_id, 
first_stage=None, last_stage=None): output = dict({'critical_issues_count': 0}) stages = filter_d.get("events", []) diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index 48f403f57..fa27fbe1c 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -27,10 +27,11 @@ def compute(): t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0), t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0), t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0) - RETURNING *,(SELECT email FROM public.users WHERE role='owner' LIMIT 1);""" + RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);""" ) data = cur.fetchone() - requests.post('https://parrot.asayer.io/os/telemetry', json={"stats": [process_data(data)]}) + requests.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(data)]}) def new_client(): @@ -40,4 +41,4 @@ def new_client(): (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email FROM public.tenants;""") data = cur.fetchone() - requests.post('https://parrot.asayer.io/os/signup', json=process_data(data)) + requests.post('https://api.openreplay.com/os/signup', json=process_data(data)) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index b4ac0f869..ceada34f8 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -571,7 +571,6 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): ) -@dev.timed def authenticate(email, password, for_change_password=False, for_plugin=False): with pg_client.PostgresClient() as cur: query = cur.mogrify( diff --git a/api/chalicelib/utils/TimeUTC.py b/api/chalicelib/utils/TimeUTC.py index bac7a027f..d399e1651 100644 --- a/api/chalicelib/utils/TimeUTC.py +++ b/api/chalicelib/utils/TimeUTC.py @@ -88,13 +88,18 @@ class TimeUTC: return datetime.utcfromtimestamp(ts // 1000).strftime(fmt) @staticmethod - def human_to_timestamp(ts, pattern): + def human_to_timestamp(ts, pattern="%Y-%m-%dT%H:%M:%S.%f"): return int(datetime.strptime(ts, pattern).timestamp() * 1000) @staticmethod def datetime_to_timestamp(date): if date is None: return None + if isinstance(date, str): + fp = date.find(".") + if fp > 0: + date += '0' * (6 - len(date[fp + 1:])) + date = datetime.fromisoformat(date) return int(datetime.timestamp(date) * 1000) @staticmethod
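The new string branch in datetime_to_timestamp exists because datetime.fromisoformat() (before Python 3.11) only accepts fractional seconds with exactly 3 or 6 digits; the padding widens any shorter fraction to microseconds before parsing. A standalone sketch of the same logic:

from datetime import datetime

def iso_to_timestamp_ms(date: str) -> int:
    # "2022-03-01T10:00:00.5" would make fromisoformat() raise on older
    # Pythons, so pad the fraction to 6 digits: ".5" -> ".500000".
    fp = date.find(".")
    if fp > 0:
        date += "0" * (6 - len(date[fp + 1:]))
    return int(datetime.fromisoformat(date).timestamp() * 1000)

assert iso_to_timestamp_ms("2022-03-01T10:00:00.5") == iso_to_timestamp_ms("2022-03-01T10:00:00.500000")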
JIRA AUTH ERROR") print(e) + raise e def set_jira_project_id(self, project_id): self._config["JIRA_PROJECT_ID"] = project_id @@ -33,8 +35,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_projects() - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") projects_dict_list = [] for project in projects: projects_dict_list.append(self.__parser_project_info(project)) @@ -49,8 +51,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_project() - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") return self.__parser_project_info(project) def get_issues(self, sql: str, offset: int = 0): @@ -65,8 +67,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_issues(sql, offset) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") issue_dict_list = [] for issue in issues: @@ -85,8 +87,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_issue(issue_id) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") return self.__parser_issue_info(issue) def get_issue_v3(self, issue_id: str): @@ -105,8 +107,8 @@ class JiraManager: if self.retries > 0: time.sleep(1) return self.get_issue_v3(issue_id) - print(f"=>Error {e}") - raise e + print(f"=>Exception {e}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: get issue error") return self.__parser_issue_info(issue.json()) def create_issue(self, issue_dict): @@ -119,8 +121,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.create_issue(issue_dict) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") def close_issue(self, issue): try: @@ -131,8 +133,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.close_issue(issue) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") def assign_issue(self, issue_id, account_id) -> bool: try: @@ -142,8 +144,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.assign_issue(issue_id, account_id) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") def add_comment(self, issue_id: str, comment: str): try: @@ -153,8 +155,8 @@ class JiraManager: if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.add_comment(issue_id, comment) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") return self.__parser_comment_info(comment) def add_comment_v3(self, issue_id: str, comment: str): @@ -190,8 +192,8 @@ class JiraManager: if self.retries > 0: time.sleep(1) return 
self.add_comment_v3(issue_id, comment) - print(f"=>Error {e}") - raise e + print(f"=>Exception {e}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="JIRA: comment error") return self.__parser_comment_info(comment_response.json()) def get_comments(self, issueKey): @@ -206,8 +208,8 @@ if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_comments(issueKey) - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") def get_meta(self): meta = {} @@ -217,14 +219,16 @@ class JiraManager: def get_assignable_users(self): try: - users = self._jira.search_assignable_users_for_issues('', project=self._config['JIRA_PROJECT_ID']) + users = self._jira.search_assignable_users_for_issues(project=self._config['JIRA_PROJECT_ID'], query="*") except JIRAError as e: self.retries -= 1 if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_assignable_users() - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + if e.status_code == 401: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="JIRA: 401 Unauthorized") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") users_dict = [] for user in users: users_dict.append({ @@ -244,8 +248,8 @@ if (e.status_code // 100) == 4 and self.retries > 0: time.sleep(1) return self.get_issue_types() - print(f"=>Error {e.text}") - raise e + print(f"=>Exception {e.text}") + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}") types_dict = [] for type in types: if not type.subtask and not type.name.lower() == "epic": diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 6e4118689..3d60dda5c 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -1,3 +1,4 @@ +import time from threading import Semaphore import psycopg2 @@ -9,7 +10,8 @@ _PG_CONFIG = {"host": config("pg_host"), "database": config("pg_dbname"), "user": config("pg_user"), "password": config("pg_password"), - "port": config("pg_port", cast=int)} + "port": config("pg_port", cast=int), + "application_name": config("APP_NAME", default="PY")} PG_CONFIG = dict(_PG_CONFIG) if config("pg_timeout", cast=int, default=0) > 0: PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}" @@ -36,9 +38,14 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool): postgreSQL_pool: ORThreadedConnectionPool = None +RETRY_MAX = config("PG_RETRY_MAX", cast=int, default=50) +RETRY_INTERVAL = config("PG_RETRY_INTERVAL", cast=int, default=2) +RETRY = 0 + def make_pool(): global postgreSQL_pool + global RETRY if postgreSQL_pool is not None: try: postgreSQL_pool.closeall() @@ -50,7 +57,13 @@ def make_pool(): print("Connection pool created successfully") except (Exception, psycopg2.DatabaseError) as error: print("Error while connecting to PostgreSQL", error) - raise error + if RETRY < RETRY_MAX: + RETRY += 1 + print(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}") + time.sleep(RETRY_INTERVAL) + make_pool() + else: + raise error make_pool()
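With the hunk above, make_pool() retries instead of failing the whole service when PostgreSQL is not reachable yet (typical during rolling deploys, when the api pod can come up before the database). A generic, iterative sketch of the same bounded-retry idea, using the two new env knobs as defaults (the recursive version in the patch is equivalent up to the attempt count):

import time

def call_with_retries(setup, retry_max=50, retry_interval=2):
    # Try `setup` up to retry_max times, sleeping retry_interval seconds
    # between attempts; the last failure propagates to the caller.
    for retry in range(1, retry_max + 1):
        try:
            return setup()
        except Exception as error:
            print("Error while connecting to PostgreSQL", error)
            if retry == retry_max:
                raise
            print(f"waiting for {retry_interval}s before retry n°{retry}")
            time.sleep(retry_interval)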
@@ -64,6 +77,8 @@ class PostgresClient: def __init__(self, long_query=False): self.long_query = long_query if long_query: + long_config = dict(_PG_CONFIG) + long_config["application_name"] += "-LONG" - self.connection = psycopg2.connect(**_PG_CONFIG) + self.connection = psycopg2.connect(**long_config) else: self.connection = postgreSQL_pool.getconn() diff --git a/api/entrypoint.sh b/api/entrypoint.sh index a092737be..a41427181 100755 --- a/api/entrypoint.sh +++ b/api/entrypoint.sh @@ -1,2 +1,5 @@ #!/bin/bash -uvicorn app:app --host 0.0.0.0 --reload +cd sourcemap-reader +nohup npm start &> /tmp/sourcemap-reader.log & +cd .. +uvicorn app:app --host 0.0.0.0 --reload --proxy-headers diff --git a/ee/api/entrypoint.sh b/api/entrypoint_alerts.sh similarity index 98% rename from ee/api/entrypoint.sh rename to api/entrypoint_alerts.sh index a092737be..5f15a78b7 100755 --- a/ee/api/entrypoint.sh +++ b/api/entrypoint_alerts.sh @@ -1,2 +1,3 @@ #!/bin/bash + uvicorn app:app --host 0.0.0.0 --reload diff --git a/api/requirements.txt b/api/requirements.txt index 4af962f4f..198b535dd 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -4,11 +4,11 @@ boto3==1.16.1 pyjwt==1.7.1 psycopg2-binary==2.8.6 elasticsearch==7.9.1 -jira==2.0.0 +jira==3.1.1 -fastapi==0.74.1 +fastapi==0.75.0 uvicorn[standard]==0.17.5 python-decouple==3.6 pydantic[email]==1.8.2 diff --git a/api/routers/base.py b/api/routers/base.py index ff7fe165f..5c665b2d1 100644 --- a/api/routers/base.py +++ b/api/routers/base.py @@ -2,11 +2,13 @@ from fastapi import APIRouter, Depends from auth.auth_apikey import APIKeyAuth from auth.auth_jwt import JWTAuth +from auth.auth_project import ProjectAuthorizer from or_dependencies import ORRoute def get_routers() -> (APIRouter, APIRouter, APIRouter): public_app = APIRouter(route_class=ORRoute) - app = APIRouter(dependencies=[Depends(JWTAuth())], route_class=ORRoute) - app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth())], route_class=ORRoute) + app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))], route_class=ORRoute) + app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth()), Depends(ProjectAuthorizer("projectKey"))], + route_class=ORRoute) return public_app, app, app_apikey
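With base.py wiring ProjectAuthorizer into both authenticated routers, any route that declares {projectId} (or {projectKey} on the API-key router) is validated before its handler runs, so handlers can assume the project exists and belongs to the caller's tenant. A sketch of how this composes; the endpoint is illustrative, the imports mirror the patch:

from fastapi import APIRouter, Depends
from auth.auth_jwt import JWTAuth
from auth.auth_project import ProjectAuthorizer

app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))])

@app.get('/{projectId}/example')
def example(projectId: int):
    # Reaching this point means the JWT was accepted and projectId resolved
    # to a project in the caller's tenant; otherwise a 404 was already raised.
    return {"data": projectId}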
tags=["sessions"]) @app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"]) def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): @@ -44,6 +48,7 @@ def add_remove_favorite_session2(projectId: int, sessionId: int, session_id=sessionId)} +@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"]) @app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"]) def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, @@ -56,6 +61,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = } +@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) @app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) def get_error_trace(projectId: int, sessionId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): @@ -67,6 +73,7 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, } +@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) @app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) def assign_session(projectId: int, sessionId: int, issueId: str, context: schemas.CurrentContext = Depends(OR_context)): @@ -79,6 +86,8 @@ def assign_session(projectId: int, sessionId: int, issueId: str, } +@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) @app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) @app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), @@ -387,7 +396,7 @@ def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(O def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) - if error is not None: + if error is not None and integration is None: return {"data": {}} return {"data": integration.get_obfuscated()} @@ -399,7 +408,7 @@ def add_edit_jira_cloud(data: schemas.JiraGithubSchema = Body(...), error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, tenant_id=context.tenant_id, user_id=context.user_id) - if error is not None: + if error is not None and integration is None: return error data.provider = integration_jira_cloud.PROVIDER return {"data": integration.add_edit(data=data.dict())} @@ -422,7 +431,7 @@ def add_edit_github(data: schemas.JiraGithubSchema = Body(...), def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, user_id=context.user_id) - if error is not None: + if error is not None and integration is None: return error return {"data": integration.delete()} @@ -825,6 +834,21 @@ def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentCo return {'data': data} 
+@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"]) +def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) + if data is None: + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_favorite_viewed.view_session, project_id=projectId, + user_id=context.user_id, session_id=sessionId) + return {'data': data} + + @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -889,12 +913,14 @@ def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, @app.get('/{projectId}/errors/{errorId}', tags=['errors']) -def errors_get_details(projectId: int, errorId: str, density24: int = 24, density30: int = 30, +def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, + density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, **{"density24": density24, "density30": density30}) if data.get("data") is not None: - errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context.user_id, error_id=errorId) + background_tasks.add_task(errors_favorite_viewed.viewed_error, project_id=projectId, user_id=context.user_id, + error_id=errorId) return data @@ -1065,78 +1091,6 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), user_id=context.user_id) -@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) -def try_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.merged_live(project_id=projectId, data=data)} - - -@app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) -def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data) - - -@app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) -def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} - - -@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id) - if data is None: - return {"errors": ["custom metric not found"]} - return {"data": data} - - -@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) -def get_custom_metric_sessions(projectId: int, metric_id: int, - data: schemas.CustomMetricSessionsPayloadSchema = Body(...), - context: 
schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) - if data is None: - return {"errors": ["custom metric not found"]} - return {"data": data} - - -@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) -def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=metric_id, - data=data) - if data is None: - return {"errors": ["custom metric not found"]} - return {"data": data} - - -@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) - if data is None: - return {"errors": ["custom metric not found"]} - return {"data": data} - - -@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) -def update_custom_metric_state(projectId: int, metric_id: int, - data: schemas.UpdateCustomMetricsStatusSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return { - "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id, - status=data.active)} - - -@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} - - @app.post('/{projectId}/saved_search', tags=["savedSearch"]) @app.put('/{projectId}/saved_search', tags=["savedSearch"]) def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...), diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index c149266b5..e7e87e76c 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -51,8 +51,6 @@ def login(data: schemas.UserLoginSchema = Body(...)): c = tenants.get_by_tenant_id(tenant_id) c.pop("createdAt") - c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, - stack_integrations=True, version=True) c["smtp"] = helper.has_smtp() c["iceServers"] = assist.get_ice_servers() r["smtp"] = c["smtp"] @@ -93,10 +91,9 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/projects/{projectId}', tags=['projects']) -def get_project(projectId: int, last_tracker_version: Optional[str] = None, - context: schemas.CurrentContext = Depends(OR_context)): +def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True, - include_gdpr=True, last_tracker_version=last_tracker_version) + include_gdpr=True) if data is None: return {"errors": ["project not found"]} return {"data": data} @@ -219,15 +216,12 @@ def get_client(context: schemas.CurrentContext = Depends(OR_context)): r = 
tenants.get_by_tenant_id(context.tenant_id) if r is not None: r.pop("createdAt") - r["projects"] = projects.get_projects(tenant_id=context.tenant_id, recording_state=True, recorded=True, - stack_integrations=True, version=True) return { 'data': r } @app.get('/projects', tags=['projects']) -def get_projects(last_tracker_version: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)): +def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, - stack_integrations=True, version=True, - last_tracker_version=last_tracker_version)} + stack_integrations=True)} diff --git a/api/routers/subs/dashboard.py b/api/routers/subs/dashboard.py index 169893693..b167c4231 100644 --- a/api/routers/subs/dashboard.py +++ b/api/routers/subs/dashboard.py @@ -1,7 +1,7 @@ from fastapi import Body import schemas -from chalicelib.core import dashboard +from chalicelib.core import metrics from chalicelib.core import metadata from chalicelib.utils import helper from routers.base import get_routers @@ -20,61 +20,61 @@ def get_metadata_map(projectId: int): @app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"]) def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())} + return {"data": metrics.get_processed_sessions(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"]) def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_errors(project_id=projectId, **data.dict())} + return {"data": metrics.get_errors(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"]) def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_errors_trend(project_id=projectId, **data.dict())} + return {"data": metrics.get_errors_trend(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"]) def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_application_activity(project_id=projectId, **data.dict())} + return {"data": metrics.get_application_activity(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"]) def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_page_metrics(project_id=projectId, **data.dict())} + return {"data": metrics.get_page_metrics(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"]) def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = 
Body(...)): - return {"data": dashboard.get_user_activity(project_id=projectId, **data.dict())} + return {"data": metrics.get_user_activity(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"]) def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_performance(project_id=projectId, **data.dict())} + return {"data": metrics.get_performance(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"]) def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_slowest_images(project_id=projectId, **data.dict())} + return {"data": metrics.get_slowest_images(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"]) def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **data.dict())} + return {"data": metrics.get_missing_resources_trend(project_id=projectId, **data.dict())} @app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"]) def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_network(project_id=projectId, **data.dict())} + return {"data": metrics.get_network(project_id=projectId, **data.dict())} @app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"]) @@ -85,20 +85,20 @@ def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = q = '^' + q if widget in ['performance']: - data = dashboard.search(q, type, project_id=projectId, + data = metrics.search(q, type, project_id=projectId, platform=platform, performance=True) elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', 'impacted_sessions_by_slow_pages', 'pages_response_time']: - data = dashboard.search(q, type, project_id=projectId, + data = metrics.search(q, type, project_id=projectId, platform=platform, pages_only=True) elif widget in ['resources_loading_time']: - data = dashboard.search(q, type, project_id=projectId, + data = metrics.search(q, type, project_id=projectId, platform=platform, performance=False) elif widget in ['time_between_events', 'events']: - data = dashboard.search(q, type, project_id=projectId, + data = metrics.search(q, type, project_id=projectId, platform=platform, performance=False, events_only=True) elif widget in ['metadata']: - data = dashboard.search(q, None, project_id=projectId, + data = metrics.search(q, None, project_id=projectId, platform=platform, metadata=True, key=key) else: return {"errors": [f"unsupported widget: {widget}"]} @@ -109,210 +109,210 @@ def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = @app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"]) def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return 
{"data": dashboard.get_slowest_resources(project_id=projectId, **data.dict())} + return {"data": metrics.get_slowest_resources(project_id=projectId, **data.dict())} # 2 @app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"]) def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_resources_loading_time(project_id=projectId, **data.dict())} + return {"data": metrics.get_resources_loading_time(project_id=projectId, **data.dict())} # 3 @app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"]) def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())} + return {"data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())} # 4 @app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"]) def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **data.dict())} + return {"data": metrics.get_busiest_time_of_day(project_id=projectId, **data.dict())} # 5 @app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"]) def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_sessions_location(project_id=projectId, **data.dict())} + return {"data": metrics.get_sessions_location(project_id=projectId, **data.dict())} # 6 @app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"]) def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_speed_index_location(project_id=projectId, **data.dict())} + return {"data": metrics.get_speed_index_location(project_id=projectId, **data.dict())} # 7 @app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"]) def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_pages_response_time(project_id=projectId, **data.dict())} + return {"data": metrics.get_pages_response_time(project_id=projectId, **data.dict())} # 8 @app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"]) def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **data.dict())} + return {"data": metrics.get_pages_response_time_distribution(project_id=projectId, **data.dict())} # 9 @app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", 
"metrics"]) def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_top_metrics(project_id=projectId, **data.dict())} + return {"data": metrics.get_top_metrics(project_id=projectId, **data.dict())} # 10 @app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"]) def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_time_to_render(project_id=projectId, **data.dict())} + return {"data": metrics.get_time_to_render(project_id=projectId, **data.dict())} # 11 @app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"]) def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())} + return {"data": metrics.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())} # 12 @app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"]) def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())} + return {"data": metrics.get_memory_consumption(project_id=projectId, **data.dict())} # 12.1 @app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"]) def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_avg_fps(project_id=projectId, **data.dict())} + return {"data": metrics.get_avg_fps(project_id=projectId, **data.dict())} # 12.2 @app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())} + return {"data": metrics.get_avg_cpu(project_id=projectId, **data.dict())} # 13 @app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_crashes(project_id=projectId, **data.dict())} + return {"data": metrics.get_crashes(project_id=projectId, **data.dict())} # 14 @app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): - return {"data": dashboard.get_domains_errors(project_id=projectId, **data.dict())} + return {"data": metrics.get_domains_errors(project_id=projectId, **data.dict())} # 14.1 @app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) @app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) def get_dashboard_domains_errors_4xx(projectId: int, data: 
schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_domains_errors_4xx(project_id=projectId, **data.dict())}
 
 
 # 14.2
 @app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
 def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_domains_errors_5xx(project_id=projectId, **data.dict())}
 
 
 # 15
 @app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
 def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_slowest_domains(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_slowest_domains(project_id=projectId, **data.dict())}
 
 
 # 16
 @app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
 def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_errors_per_domains(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_errors_per_domains(project_id=projectId, **data.dict())}
 
 
 # 17
 @app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
 def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_sessions_per_browser(project_id=projectId, **data.dict())}
 
 
 # 18
 @app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
 def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_calls_errors(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_calls_errors(project_id=projectId, **data.dict())}
 
 
 # 18.1
 @app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
 def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_calls_errors_4xx(project_id=projectId, **data.dict())}
 
 
 # 18.2
 @app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
 def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_calls_errors_5xx(project_id=projectId, **data.dict())}
 
 
 # 19
 @app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
 def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_errors_per_type(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_errors_per_type(project_id=projectId, **data.dict())}
 
 
 # 20
 @app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
 def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_resources_by_party(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_resources_by_party(project_id=projectId, **data.dict())}
 
 
 # 21
 @app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
 def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **data.dict())}
+    return {"data": metrics.resource_type_vs_response_end(project_id=projectId, **data.dict())}
 
 
 # 22
 @app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
 def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
 
 
 # 23
 @app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
 def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
 
 
 # 24
 @app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
 def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **data.dict())}
+    return {"data": metrics.get_resources_count_by_type(project_id=projectId, **data.dict())}
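All of the renamed /{projectId}/dashboard/* handlers above share one shape: a schemas.MetricPayloadSchema body unpacked into the matching chalicelib metrics function and wrapped in {"data": ...}. A minimal client-side sketch of calling one of them; the host, the bearer token, and the MetricPayloadSchema field names are assumptions for illustration, not confirmed by this diff:

    import httpx

    # Hypothetical host, project id and token; the payload field names
    # (start/end timestamps in milliseconds) are assumed.
    resp = httpx.post(
        "http://localhost:8080/api/1/dashboard/slowest_domains",
        json={"startTimestamp": 1651000000000, "endTimestamp": 1651086400000},
        headers={"Authorization": "Bearer <token>"},
    )
    print(resp.json()["data"])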
 
 #
 # 25
@@ -325,22 +325,74 @@ def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPa
 @app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
 def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": [
-        *helper.explode_widget(key="count_sessions",
-                               data=dashboard.get_processed_sessions(project_id=projectId, **data.dict())),
-        *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()),
-                                     "chart": dashboard.get_performance(project_id=projectId, **data.dict())
+    results = [
+        {"key": "count_sessions",
+         "data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
+        *helper.explode_widget(data={**metrics.get_application_activity(project_id=projectId, **data.dict()),
+                                     "chart": metrics.get_performance(project_id=projectId, **data.dict())
                                      .get("chart", [])}),
-        *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())),
-        *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())),
-        *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()),
-                               key="avg_pages_dom_buildtime"),
-        *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
-                               key="avg_pages_response_time"),
-        *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())),
-        *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()),
-                               key="avg_time_to_render"),
-        *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **data.dict())),
-        *helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **data.dict())),
-        *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **data.dict())),
-    ]}
+        *helper.explode_widget(data=metrics.get_page_metrics(project_id=projectId, **data.dict())),
+        *helper.explode_widget(data=metrics.get_user_activity(project_id=projectId, **data.dict())),
+        {"key": "avg_pages_dom_buildtime",
+         "data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
+        {"key": "avg_pages_response_time",
+         "data": metrics.get_pages_response_time(project_id=projectId, **data.dict())
+         },
+        *helper.explode_widget(metrics.get_top_metrics(project_id=projectId, **data.dict())),
+        {"key": "avg_time_to_render", "data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
+        {"key": "avg_used_js_heap_size", "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
+        {"key": "avg_cpu", "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_fps,
+         "data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
+    ]
+    results = sorted(results, key=lambda r: r["key"])
+    return {"data": results}
+
+
+@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
+@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
+def get_dashboard_group2(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
+    results = [
+        {"key": schemas.TemplatePredefinedKeys.count_sessions,
+         "data": metrics.get_processed_sessions(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_image_load_time,
+         "data": metrics.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_page_load_time,
+         "data": metrics.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_request_load_time,
+         "data": metrics.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start,
+         "data": metrics.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel,
+         "data": metrics.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_visited_pages,
+         "data": metrics.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_session_duration,
+         "data": metrics.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime,
+         "data": metrics.get_pages_dom_build_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_pages_response_time,
+         "data": metrics.get_pages_response_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_response_time,
+         "data": metrics.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_first_paint,
+         "data": metrics.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded,
+         "data": metrics.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_till_first_bit,
+         "data": metrics.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive,
+         "data": metrics.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.count_requests,
+         "data": metrics.get_top_metrics_count_requests(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_time_to_render,
+         "data": metrics.get_time_to_render(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size,
+         "data": metrics.get_memory_consumption(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_cpu,
+         "data": metrics.get_avg_cpu(project_id=projectId, **data.dict())},
+        {"key": schemas.TemplatePredefinedKeys.avg_fps,
+         "data": metrics.get_avg_fps(project_id=projectId, **data.dict())}
+    ]
+    results = sorted(results, key=lambda r: r["key"])
+    return {"data": results}
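Both overview handlers build a list of {"key", "data"} pairs and sort it by key so widgets render in a stable order. Mixing plain strings (overview) with schemas.TemplatePredefinedKeys members (overview2) is safe because the enum subclasses str; a small self-contained illustration of that property:

    from enum import Enum

    class TemplatePredefinedKeys(str, Enum):  # same (str, Enum) pattern as schemas.py
        avg_cpu = "avg_cpu"
        count_sessions = "count_sessions"

    results = [{"key": "count_sessions"}, {"key": TemplatePredefinedKeys.avg_cpu}]
    ordered = sorted(results, key=lambda r: r["key"])
    # Enum members compare as their string values, so "avg_cpu" sorts first
    # even though one key is an enum member and the other a plain str.
    print(ordered[0]["key"] == "avg_cpu")  # True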
diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py
new file mode 100644
index 000000000..a33b75d0b
--- /dev/null
+++ b/api/routers/subs/metrics.py
@@ -0,0 +1,190 @@
+from fastapi import Body, Depends
+
+import schemas
+from chalicelib.core import dashboards, custom_metrics
+from or_dependencies import OR_context
+from routers.base import get_routers
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.post('/{projectId}/dashboards', tags=["dashboard"])
+@app.put('/{projectId}/dashboards', tags=["dashboard"])
+def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
+                      context: schemas.CurrentContext = Depends(OR_context)):
+    return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
+
+
+@app.get('/{projectId}/dashboards', tags=["dashboard"])
+def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)}
+
+
+@app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
+    if data is None:
+        return {"errors": ["dashboard not found"]}
+    return {"data": data}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
+                     context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id,
+                                                dashboard_id=dashboardId, data=data)}
+
+
+@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def delete_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
+
+
+@app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"])
+def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+def add_widget_to_dashboard(projectId: int, dashboardId: int,
+                            data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
+                            context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+                                          data=data)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
+                                       data: schemas.CreateCustomMetricsSchema = Body(...),
+                                       context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
+                                                        dashboard_id=dashboardId, data=data)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
+                               data: schemas.UpdateWidgetPayloadSchema = Body(...),
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+                                    widget_id=widgetId, data=data)
+
+
+@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int,
+                                 context: schemas.CurrentContext = Depends(OR_context)):
+    return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+                                    widget_id=widgetId)
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
+def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
+                     data: schemas.CustomMetricChartPayloadSchema = Body(...),
+                     context: schemas.CurrentContext = Depends(OR_context)):
+    data = dashboards.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+                                        widget_id=widgetId, data=data)
+    if data is None:
+        return {"errors": ["widget not found"]}
+    return {"data": data}
+
+
+@app.get('/{projectId}/metrics/templates', tags=["dashboard"])
+def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": dashboards.get_templates(project_id=projectId, user_id=context.user_id)}
+
+
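Taken together, the routes above form a small CRUD surface for dashboards. A hedged end-to-end sketch of the intended flow; the host, token, response key names and the widget payload are assumptions, since this diff only shows the route signatures:

    import httpx

    BASE = "http://localhost:8080/api/1"          # hypothetical host, project 1
    AUTH = {"Authorization": "Bearer <token>"}    # hypothetical auth header

    # CreateDashboardSchema: name is required, everything else has defaults.
    dash = httpx.put(f"{BASE}/dashboards",
                     json={"name": "Web performance", "isPublic": False},
                     headers=AUTH).json()

    # AddWidgetToDashboardPayloadSchema: metricId required, config optional.
    httpx.put(f"{BASE}/dashboards/{dash['dashboardId']}/widgets",  # key name assumed
              json={"metricId": 42, "config": {}}, headers=AUTH)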
+@app.post('/{projectId}/metrics/try', tags=["dashboard"])
+@app.put('/{projectId}/metrics/try', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
+                      context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": custom_metrics.merged_live(project_id=projectId, data=data)}
+
+
+@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
+def try_custom_metric_sessions(projectId: int,
+                               data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
+    return {"data": data}
+
+
+@app.post('/{projectId}/metrics', tags=["dashboard"])
+@app.put('/{projectId}/metrics', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
+def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
+                      context: schemas.CurrentContext = Depends(OR_context)):
+    return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
+
+
+@app.get('/{projectId}/metrics', tags=["dashboard"])
+@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
+def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
+
+
+@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
+    data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
+    if data is None:
+        return {"errors": ["custom metric not found"]}
+    return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
+def get_custom_metric_sessions(projectId: int, metric_id: int,
+                               data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
+                               context: schemas.CurrentContext = Depends(OR_context)):
+    data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+    if data is None:
+        return {"errors": ["custom metric not found"]}
+    return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
+def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
+                            context: schemas.CurrentContext = Depends(OR_context)):
+    data = dashboards.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+                                         data=data)
+    if data is None:
+        return {"errors": ["custom metric not found"]}
+    return {"data": data}
+
+
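Every handler in this block is mounted twice, under /metrics and under the legacy /custom_metrics prefix, simply by stacking route decorators. FastAPI registers one route per decorator while the function body stays shared; a minimal runnable illustration of the pattern:

    from fastapi import FastAPI

    app = FastAPI()

    @app.post("/metrics/try")          # each decorator adds one route
    @app.post("/custom_metrics/try")   # same handler, legacy alias
    def try_metric():
        return {"data": "ok"}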
tags=["customMetrics"]) +def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + if data is None: + return {"errors": ["custom metric not found"]} + return {"data": data} + + +@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +def update_custom_metric_state(projectId: int, metric_id: int, + data: schemas.UpdateCustomMetricsStatusSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id, + status=data.active)} + + +@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} diff --git a/api/routers/app/v1_api.py b/api/routers/subs/v1_api.py similarity index 100% rename from api/routers/app/v1_api.py rename to api/routers/subs/v1_api.py diff --git a/api/schemas.py b/api/schemas.py index 77cb78c05..f1daef481 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -588,7 +588,7 @@ class SessionSearchFilterSchema(__MixedSearchFilter): Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...) type: FilterType = Field(...) operator: Union[SearchEventOperator, MathOperator] = Field(...) 
-    source: Optional[Union[ErrorSource, str]] = Field(default=ErrorSource.js_exception)
+    source: Optional[Union[ErrorSource, str]] = Field(default=None)
 
     @root_validator
     def filter_validator(cls, values):
@@ -613,7 +613,12 @@ class SessionSearchFilterSchema(__MixedSearchFilter):
         return values
 
 
-class SessionsSearchPayloadSchema(BaseModel):
+class _PaginatedSchema(BaseModel):
+    limit: int = Field(default=200, gt=0, le=200)
+    page: int = Field(default=1, gt=0)
+
+
+class SessionsSearchPayloadSchema(_PaginatedSchema):
     events: List[_SessionSearchEventSchema] = Field([])
     filters: List[SessionSearchFilterSchema] = Field([])
     startDate: int = Field(None)
@@ -622,8 +627,6 @@ class SessionsSearchPayloadSchema(BaseModel):
     order: Literal["asc", "desc"] = Field(default="desc")
     events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
     group_by_user: bool = Field(default=False)
-    limit: int = Field(default=200, gt=0, le=200)
-    page: int = Field(default=1, gt=0)
     bookmarked: bool = Field(default=False)
 
     class Config:
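Factoring limit/page into _PaginatedSchema means every payload that inherits it gets the same bounds checking from pydantic before a handler runs. A self-contained sketch of the mechanism, in the pydantic v1 style this file already uses:

    from pydantic import BaseModel, Field, ValidationError

    class _PaginatedSchema(BaseModel):
        limit: int = Field(default=200, gt=0, le=200)
        page: int = Field(default=1, gt=0)

    class SearchPayload(_PaginatedSchema):
        query: str = ""

    try:
        SearchPayload(limit=500)      # rejected: limit must be <= 200
    except ValidationError as e:
        print(e.errors()[0]["loc"])   # ('limit',)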
@@ -776,6 +779,7 @@ class CustomMetricCreateSeriesSchema(BaseModel):
 class MetricTimeseriesViewType(str, Enum):
     line_chart = "lineChart"
     progress = "progress"
+    area_chart = "areaChart"
 
 
 class MetricTableViewType(str, Enum):
@@ -802,9 +806,10 @@ class TimeseriesMetricOfType(str, Enum):
     session_count = "sessionCount"
 
 
-class CustomMetricSessionsPayloadSchema(FlatSessionsSearch):
-    startDate: int = Field(TimeUTC.now(-7))
-    endDate: int = Field(TimeUTC.now())
+class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
+    startTimestamp: int = Field(TimeUTC.now(-7))
+    endTimestamp: int = Field(TimeUTC.now())
+    series: Optional[List[CustomMetricCreateSeriesSchema]] = Field(default=None)
 
     class Config:
         alias_generator = attribute_to_camel_case
@@ -817,10 +822,10 @@ class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema):
         alias_generator = attribute_to_camel_case
 
 
-class CreateCustomMetricsSchema(CustomMetricChartPayloadSchema):
+class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
     name: str = Field(...)
-    series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
-    is_public: bool = Field(default=True, const=True)
+    series: List[CustomMetricCreateSeriesSchema] = Field(...)
+    is_public: bool = Field(default=True)
     view_type: Union[MetricTimeseriesViewType, MetricTableViewType] = Field(MetricTimeseriesViewType.line_chart)
     metric_type: MetricType = Field(MetricType.timeseries)
     metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
@@ -858,6 +863,10 @@ class CreateCustomMetricsSchema(CustomMetricChartPayloadSchema):
         alias_generator = attribute_to_camel_case
 
 
+class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
+    series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
+
+
 class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
     series_id: Optional[int] = Field(None)
 
@@ -875,3 +884,99 @@ class UpdateCustomMetricsStatusSchema(BaseModel):
 
 class SavedSearchSchema(FunnelSchema):
     filter: FlatSessionsSearchPayloadSchema = Field([])
+
+
+class CreateDashboardSchema(BaseModel):
+    name: str = Field(..., min_length=1)
+    is_public: bool = Field(default=False)
+    is_pinned: bool = Field(default=False)
+    metrics: Optional[List[int]] = Field(default=[])
+
+    class Config:
+        alias_generator = attribute_to_camel_case
+
+
+class EditDashboardSchema(CreateDashboardSchema):
+    is_public: Optional[bool] = Field(default=None)
+    is_pinned: Optional[bool] = Field(default=None)
+
+
+class UpdateWidgetPayloadSchema(BaseModel):
+    config: dict = Field(default={})
+
+    class Config:
+        alias_generator = attribute_to_camel_case
+
+
+class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema):
+    metric_id: int = Field(...)
+
+    class Config:
+        alias_generator = attribute_to_camel_case
+
+
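All of these new schemas rely on Config.alias_generator so clients send camelCase JSON while the Python side keeps snake_case attributes. A small sketch of the mechanism; the attribute_to_camel_case helper below is a plausible stand-in for the one imported in schemas.py, whose implementation is not shown in this diff:

    from pydantic import BaseModel, Field

    def attribute_to_camel_case(name: str) -> str:
        head, *tail = name.split("_")
        return head + "".join(part.title() for part in tail)

    class CreateDashboardSchema(BaseModel):
        is_public: bool = Field(default=False)

        class Config:
            alias_generator = attribute_to_camel_case

    print(CreateDashboardSchema.parse_obj({"isPublic": True}).is_public)  # True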
+# these values should match the keys in the metrics table
+class TemplatePredefinedKeys(str, Enum):
+    count_sessions = "count_sessions"
+    avg_request_load_time = "avg_request_load_time"
+    avg_page_load_time = "avg_page_load_time"
+    avg_image_load_time = "avg_image_load_time"
+    avg_dom_content_load_start = "avg_dom_content_load_start"
+    avg_first_contentful_pixel = "avg_first_contentful_pixel"
+    avg_visited_pages = "avg_visited_pages"
+    avg_session_duration = "avg_session_duration"
+    avg_pages_dom_buildtime = "avg_pages_dom_buildtime"
+    avg_pages_response_time = "avg_pages_response_time"
+    avg_response_time = "avg_response_time"
+    avg_first_paint = "avg_first_paint"
+    avg_dom_content_loaded = "avg_dom_content_loaded"
+    avg_till_first_bit = "avg_till_first_byte"
+    avg_time_to_interactive = "avg_time_to_interactive"
+    count_requests = "count_requests"
+    avg_time_to_render = "avg_time_to_render"
+    avg_used_js_heap_size = "avg_used_js_heap_size"
+    avg_cpu = "avg_cpu"
+    avg_fps = "avg_fps"
+    impacted_sessions_by_js_errors = "impacted_sessions_by_js_errors"
+    domains_errors_4xx = "domains_errors_4xx"
+    domains_errors_5xx = "domains_errors_5xx"
+    errors_per_domains = "errors_per_domains"
+    calls_errors = "calls_errors"
+    errors_by_type = "errors_per_type"
+    errors_by_origin = "resources_by_party"
+    speed_index_by_location = "speed_location"
+    slowest_domains = "slowest_domains"
+    sessions_per_browser = "sessions_per_browser"
+    time_to_render = "time_to_render"
+    impacted_sessions_by_slow_pages = "impacted_sessions_by_slow_pages"
+    memory_consumption = "memory_consumption"
+    cpu_load = "cpu"
+    frame_rate = "fps"
+    crashes = "crashes"
+    resources_vs_visually_complete = "resources_vs_visually_complete"
+    pages_dom_buildtime = "pages_dom_buildtime"
+    pages_response_time = "pages_response_time"
+    pages_response_time_distribution = "pages_response_time_distribution"
+    missing_resources = "missing_resources"
+    slowest_resources = "slowest_resources"
+    resources_fetch_time = "resources_loading_time"
+    resource_type_vs_response_end = "resource_type_vs_response_end"
+    resources_count_by_type = "resources_count_by_type"
+
+
+class TemplatePredefinedUnits(str, Enum):
+    millisecond = "ms"
+    minute = "min"
+    memory = "mb"
+    frame = "f/s"
+    percentage = "%"
+    count = "count"
+
+
+class CustomMetricAndTemplate(BaseModel):
+    is_template: bool = Field(...)
+    project_id: Optional[int] = Field(...)
+    predefined_key: Optional[TemplatePredefinedKeys] = Field(...)
+
+    class Config:
+        alias_generator = attribute_to_camel_case
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 5cefd4cb4..b7a494f86 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,4 +1,4 @@
-FROM golang:1.13-alpine3.10 AS prepare
+FROM golang:1.18-alpine3.15 AS prepare
 
 RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
 
@@ -27,6 +27,7 @@ ENV TZ=UTC \
     HTTP_PORT=80 \
     BEACON_SIZE_LIMIT=7000000 \
     KAFKA_USE_SSL=true \
+    KAFKA_MAX_POLL_INTERVAL_MS=400000 \
     REDIS_STREAMS_MAX_LEN=3000 \
     TOPIC_RAW_WEB=raw \
     TOPIC_RAW_IOS=raw-ios \
diff --git a/backend/go.mod b/backend/go.mod
index ab98ca444..6588529a8 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -1,14 +1,12 @@
 module openreplay/backend
 
-go 1.13
+go 1.18
 
 require (
 	cloud.google.com/go/logging v1.4.2
 	github.com/ClickHouse/clickhouse-go v1.4.3
-	github.com/Masterminds/squirrel v1.5.0
 	github.com/aws/aws-sdk-go v1.35.23
 	github.com/btcsuite/btcutil v1.0.2
-	github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
 	github.com/elastic/go-elasticsearch/v7 v7.13.1
 	github.com/go-redis/redis v6.15.9+incompatible
 	github.com/google/uuid v1.1.2
@@ -16,14 +14,47 @@
 	github.com/jackc/pgconn v1.6.0
 	github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451
 	github.com/jackc/pgx/v4 v4.6.0
-	github.com/klauspost/compress v1.11.9 // indirect
 	github.com/klauspost/pgzip v1.2.5
-	github.com/lib/pq v1.2.0
 	github.com/oschwald/maxminddb-golang v1.7.0
 	github.com/pkg/errors v0.9.1
 	github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
 	github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe
+	golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420
 	google.golang.org/api v0.50.0
 	gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0
-
+)
+
+require (
+	cloud.google.com/go v0.84.0 // indirect
+	github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 // indirect
+	github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
+	github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
+	github.com/golang/protobuf v1.5.2 // indirect
+	github.com/google/go-cmp v0.5.6 // indirect
+	github.com/googleapis/gax-go/v2 v2.0.5 // indirect
+	github.com/jackc/chunkreader/v2 v2.0.1 // indirect
+	github.com/jackc/pgio v1.0.0 // indirect
+	github.com/jackc/pgpassfile v1.0.0 // indirect
+	github.com/jackc/pgproto3/v2 v2.0.2 // indirect
+	github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8 // indirect
+	github.com/jackc/pgtype v1.3.0 // indirect
+	github.com/jackc/puddle v1.1.0 // indirect
+	github.com/jmespath/go-jmespath v0.4.0 // indirect
+	github.com/jstemmer/go-junit-report v0.9.1 // indirect
+	github.com/klauspost/compress v1.11.9 // indirect
+	go.opencensus.io v0.23.0 // indirect
+	golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 // indirect
+	golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect
+	golang.org/x/mod v0.4.2 // indirect
+	golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect
+	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
+	golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 // indirect
+	golang.org/x/text v0.3.6 // indirect
+	golang.org/x/tools v0.1.4 // indirect
+	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	google.golang.org/appengine v1.6.7 // indirect
+	google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84 // indirect
+	google.golang.org/grpc v1.38.0 // indirect
+	google.golang.org/protobuf v1.26.0 // indirect
+	gopkg.in/yaml.v2 v2.2.8 // indirect
+)
diff --git a/backend/go.sum b/backend/go.sum
index 8d538a0b4..607936204 100644
--- a/backend/go.sum
+++ b/backend/go.sum
@@ -46,8 +46,6 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc=
 github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
-github.com/Masterminds/squirrel v1.5.0 h1:JukIZisrUXadA9pl3rMkjhiamxiB0cXiu+HGp/Y8cY8=
-github.com/Masterminds/squirrel v1.5.0/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10=
 github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
 github.com/aws/aws-sdk-go v1.35.23 h1:SCP0d0XvyJTDmfnHEQPvBaYi3kea1VNUo7uQmkVgFts=
 github.com/aws/aws-sdk-go v1.35.23/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k=
@@ -75,8 +73,8 @@ github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnht
 github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
 github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
 github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
-github.com/confluentinc/confluent-kafka-go v1.5.2 h1:l+qt+a0Okmq0Bdr1P55IX4fiwFJyg0lZQmfHkAFkv7E=
-github.com/confluentinc/confluent-kafka-go v1.5.2/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
+github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
+github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
 github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
@@ -93,7 +91,6 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y
 github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
 github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -135,7 +132,6 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
 github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
 github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
 github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA=
 github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
 github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@@ -152,11 +148,9 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
 github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
 github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
-github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ=
 github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
 github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
 github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@@ -184,7 +178,6 @@ github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
 github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
-github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0=
 github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
 github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
 github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
@@ -203,7 +196,6 @@ github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye47
 github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
 github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
 github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
-github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A=
 github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
 github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
 github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
@@ -219,7 +211,6 @@ github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCM
 github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
 github.com/jackc/pgtype v1.3.0 h1:l8JvKrby3RI7Kg3bYEeU9TA4vqC38QDpFCfcrC7KuN0=
 github.com/jackc/pgtype v1.3.0/go.mod h1:b0JqxHvPmljG+HQ5IsvQ0yqeSi4nGcDTVjFoiLDb0Ik=
-github.com/jackc/pgx v3.6.2+incompatible h1:2zP5OD7kiyR3xzRYMhOcXVvkDZsImVXfj+yIyTQf3/o=
 github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
 github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
 github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
@@ -254,10 +245,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
-github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
-github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
-github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
@@ -682,8 +669,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/confluentinc/confluent-kafka-go.v1 v1.5.2 h1:g0WBLy6fobNUU8W/e9zx6I0Yl79Ya+BDW1NwzAlTiiQ=
-gopkg.in/confluentinc/confluent-kafka-go.v1 v1.5.2/go.mod h1:ZdI3yfYmdNSLQPNCpO1y00EHyWaHG5EnQEyL/ntAegY=
+gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0 h1:+RlmciBLDd/XwM1iudiG3HtCg45purnsOxEoY/+JZdQ=
+gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0/go.mod h1:ZdI3yfYmdNSLQPNCpO1y00EHyWaHG5EnQEyL/ntAegY=
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
diff --git a/backend/pkg/db/cache/messages_common.go b/backend/pkg/db/cache/messages-common.go
similarity index 66%
rename from backend/pkg/db/cache/messages_common.go
rename to backend/pkg/db/cache/messages-common.go
index 3983982fe..8ca7b2f85 100644
--- a/backend/pkg/db/cache/messages_common.go
+++ b/backend/pkg/db/cache/messages-common.go
@@ -28,30 +28,6 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error {
 	return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash)
 }
 
-func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error {
-	if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil {
-		return err
-	}
-	session, err := c.GetSession(sessionID)
-	if err != nil {
-		return err
-	}
-	session.UserID = &userID.Value
-	return nil
-}
-
-func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error {
-	if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil {
-		return err
-	}
-	session, err := c.GetSession(sessionID)
-	if err != nil {
-		return err
-	}
-	session.UserAnonymousID = &userAnonymousID.Value
-	return nil
-}
-
 func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error {
 	session, err := c.GetSession(sessionID)
 	if err != nil {
diff --git a/backend/pkg/db/cache/messages_ios.go b/backend/pkg/db/cache/messages-ios.go
similarity index 100%
rename from backend/pkg/db/cache/messages_ios.go
rename to backend/pkg/db/cache/messages-ios.go
diff --git a/backend/pkg/db/cache/messages_web.go b/backend/pkg/db/cache/messages-web.go
similarity index 100%
rename from backend/pkg/db/cache/messages_web.go
rename to backend/pkg/db/cache/messages-web.go
diff --git a/backend/pkg/db/cache/pg_cache.go b/backend/pkg/db/cache/pg-cache.go
similarity index 100%
rename from backend/pkg/db/cache/pg_cache.go
rename to backend/pkg/db/cache/pg-cache.go
diff --git a/backend/pkg/db/postgres/alert.go b/backend/pkg/db/postgres/alert.go
deleted file mode 100644
index 964977bd3..000000000
--- a/backend/pkg/db/postgres/alert.go
+++ /dev/null
@@ -1,228 +0,0 @@
-package postgres
-
-import (
-	"database/sql"
-	"errors"
-	"fmt"
-	sq "github.com/Masterminds/squirrel"
-	"log"
-	"strconv"
-	"time"
-)
-
-type TimeString sql.NullString
-type query struct {
-	Left     string  `db:"query.left" json:"left"`
-	Operator string  `db:"query.operator" json:"operator"`
-	Right    float64 `db:"query.right" json:"right"`
-}
-type options struct {
-	RenotifyInterval int64               `db:"options.renotifyInterval" json:"renotifyInterval"`
-	LastNotification int64               `db:"options.lastNotification" json:"lastNotification;omitempty"`
-	CurrentPeriod    int64               `db:"options.currentPeriod" json:"currentPeriod"`
-	PreviousPeriod   int64               `db:"options.previousPeriod" json:"previousPeriod;omitempty"`
-	Message          []map[string]string `db:"options.message" json:"message;omitempty"`
-	Change           string              `db:"options.change" json:"change;omitempty"`
-}
-type Alert struct {
-	AlertID         uint32         `db:"alert_id" json:"alert_id"`
-	ProjectID       uint32         `db:"project_id" json:"project_id"`
-	Name            string         `db:"name" json:"name"`
-	Description     sql.NullString `db:"description" json:"description"`
-	Active          bool           `db:"active" json:"active"`
-	DetectionMethod string         `db:"detection_method" json:"detection_method"`
-	Query           query          `db:"query" json:"query"`
-	DeletedAt       *int64         `db:"deleted_at" json:"deleted_at"`
-	CreatedAt       *int64         `db:"created_at" json:"created_at"`
-	Options         options        `db:"options" json:"options"`
-	TenantId        uint32         `db:"tenant_id" json:"tenant_id"`
-}
-
-func (pg *Conn) IterateAlerts(iter func(alert *Alert, err error)) error {
-	rows, err := pg.query(`
-		SELECT
-			alerts.alert_id,
-			alerts.project_id,
-			alerts.name,
-			alerts.description,
-			alerts.active,
-			alerts.detection_method,
-			alerts.query,
-			CAST(EXTRACT(epoch FROM alerts.deleted_at) * 1000 AS BIGINT) AS deleted_at,
-			CAST(EXTRACT(epoch FROM alerts.created_at) * 1000 AS BIGINT) AS created_at,
-			alerts.options,
-			0 AS tenant_id
-		FROM public.alerts
-		WHERE alerts.active AND alerts.deleted_at ISNULL;
-	`)
-	if err != nil {
-		return err
-	}
-	defer rows.Close()
-	for rows.Next() {
-		a := new(Alert)
-		if err = rows.Scan(
-			&a.AlertID,
-			&a.ProjectID,
-			&a.Name,
-			&a.Description,
-			&a.Active,
-			&a.DetectionMethod,
-			&a.Query,
-			&a.DeletedAt,
-			&a.CreatedAt,
-			&a.Options,
-			&a.TenantId,
-		); err != nil {
-			iter(nil, err)
-			continue
-		}
-		iter(a, nil)
-	}
-
-	if err = rows.Err(); err != nil {
-		return err
-	}
-	return nil
-}
-
-func (pg *Conn) SaveLastNotification(allIds []uint32) error {
-	var paramrefs string
-	for _, v := range allIds {
-		paramrefs += strconv.Itoa(int(v)) + `,`
-	}
-	paramrefs = paramrefs[:len(paramrefs)-1] // remove last ","
-	q := "UPDATE public.Alerts SET options = options||'{\"lastNotification\":" + strconv.Itoa(int(time.Now().Unix()*1000)) + "}'::jsonb WHERE alert_id IN (" + paramrefs + ");"
-	//log.Println(q)
-	log.Println("Updating PG")
-	return pg.exec(q)
-}
-
-type columnDefinition struct {
-	table     string
-	formula   string
-	condition string
-	group     string
-}
-
-var LeftToDb = map[string]columnDefinition{
-	"performance.dom_content_loaded.average":     {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
-	"performance.first_meaningful_paint.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
-	"performance.page_load_time.average":         {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(load_time ,0))"},
-	"performance.dom_build_time.average":         {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(dom_building_time,0))"},
-	"performance.speed_index.average":            {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(speed_index,0))"},
-	"performance.page_response_time.average":     {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(response_time,0))"},
-	"performance.ttfb.average":                   {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(first_paint_time,0))"},
-	"performance.time_to_render.average":         {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(visually_complete,0))"},
-	"performance.image_load_time.average":        {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))", condition: "type='img'"},
-	"performance.request_load_time.average":      {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))", condition: "type='fetch'"},
-	"resources.load_time.average":                {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))"},
-	"resources.missing.count":                    {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(DISTINCT url_hostpath)", condition: "success= FALSE"},
-	"errors.4xx_5xx.count":                       {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100!=2"},
-	"errors.4xx.count":                           {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100=4"},
-	"errors.5xx.count":                           {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100=5"},
-	"errors.javascript.impacted_sessions.count":  {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(DISTINCT session_id)", condition: "success= FALSE AND type='script'"},
-	"performance.crashes.count":                  {table: "(SELECT *, start_ts AS timestamp FROM public.sessions WHERE errors_count > 0) AS sessions", formula: "COUNT(DISTINCT session_id)", condition: "errors_count > 0"},
-	"errors.javascript.count":                    {table: "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", formula: "COUNT(DISTINCT session_id)", condition: "source='js_exception'"},
-	"errors.backend.count":                       {table: "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", formula: "COUNT(DISTINCT session_id)", condition: "source!='js_exception'"},
-}
-
-//This is the frequency of execution for each threshold
-var TimeInterval = map[int64]int64{
-	15:   3,
-	30:   5,
-	60:   10,
-	120:  20,
-	240:  30,
-	1440: 60,
-}
-
-func (a *Alert) CanCheck() bool {
-	now := time.Now().Unix() * 1000
-	var repetitionBase int64
-
-	if repetitionBase = a.Options.CurrentPeriod; a.DetectionMethod == "change" && a.Options.CurrentPeriod > a.Options.PreviousPeriod {
-		repetitionBase = a.Options.PreviousPeriod
-	}
-
-	if _, ok := TimeInterval[repetitionBase]; !ok {
-		log.Printf("repetitionBase: %d NOT FOUND", repetitionBase)
-		return false
-	}
-	return a.DeletedAt == nil && a.Active &&
-		(a.Options.RenotifyInterval <= 0 ||
-			a.Options.LastNotification <= 0 ||
-			((now - a.Options.LastNotification) > a.Options.RenotifyInterval*60*1000)) &&
-		((now-*a.CreatedAt)%(TimeInterval[repetitionBase]*60*1000)) < 60*1000
-}
-
-func (a *Alert) Build() (sq.SelectBuilder, error) {
-	colDef, ok := LeftToDb[a.Query.Left]
-	if !ok {
-		return sq.Select(), errors.New(fmt.Sprintf("!! unsupported metric '%s' from alert: %d:%s\n", a.Query.Left, a.AlertID, a.Name))
-	}
-
-	subQ := sq.
-		Select(colDef.formula + " AS value").
-		From(colDef.table).
-		Where(sq.And{sq.Expr("project_id = $1 ", a.ProjectID),
-			sq.Expr(colDef.condition)})
-	q := sq.Select(fmt.Sprint("value, coalesce(value,0)", a.Query.Operator, a.Query.Right, " AS valid"))
-	if len(colDef.group) > 0 {
-		subQ = subQ.Column(colDef.group + " AS group_value")
-		subQ = subQ.GroupBy(colDef.group)
-		q = q.Column("group_value")
-	}
-
-	if a.DetectionMethod == "threshold" {
-		q = q.FromSelect(subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)), "stat")
-	} else if a.DetectionMethod == "change" {
-		if a.Options.Change == "change" {
-			if len(colDef.group) == 0 {
-				sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)).ToSql()
-				sub2, args2, _ := subQ.Where(
-					sq.And{
-						sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60),
-						sq.Expr("timestamp>=$4 ", time.Now().Unix()-2*a.Options.CurrentPeriod*60),
-					}).ToSql()
-				sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")-(" + sub2 + ")) AS value").ToSql()
-				q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
-			} else {
-				subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60))
-				sub2, args2, _ := subQ.Where(
-					sq.And{
-						sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60),
-						sq.Expr("timestamp>=$4 ", time.Now().Unix()-2*a.Options.CurrentPeriod*60),
-					}).ToSql()
-				sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
-				q = q.FromSelect(sub1, "stat")
-			}
-		} else if a.Options.Change == "percent" {
-			if len(colDef.group) == 0 {
-				sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)).ToSql()
-				sub2, args2, _ := subQ.Where(
-					sq.And{
-						sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60),
-						sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod*60-a.Options.CurrentPeriod*60),
-					}).ToSql()
-				sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")/(" + sub2 + ")-1)*100 AS value").ToSql()
-				q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...)
-			} else {
-				subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60))
-				sub2, args2, _ := subQ.Where(
-					sq.And{
-						sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60),
-						sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod*60-a.Options.CurrentPeriod*60),
-					}).ToSql()
-				sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
-				q = q.FromSelect(sub1, "stat")
-			}
-		} else {
-			return q, errors.New("unsupported change method")
-		}
-
-	} else {
-		return q, errors.New("unsupported detection method")
-	}
-	return q, nil
-}
\ No newline at end of file
diff --git a/backend/pkg/db/postgres/listener.go b/backend/pkg/db/postgres/listener.go
index f90d83485..ef99c2c59 100644
--- a/backend/pkg/db/postgres/listener.go
+++ b/backend/pkg/db/postgres/listener.go
@@ -11,7 +11,6 @@ import (
 type Listener struct {
 	conn         *pgx.Conn
 	Integrations chan *Integration
-	Alerts       chan *Alert
 	Errors       chan error
 }
 
@@ -32,23 +31,6 @@ func NewIntegrationsListener(url string) (*Listener, error) {
 	return listener, nil
 }
 
-func NewAlertsListener(url string) (*Listener, error) {
-	conn, err := pgx.Connect(context.Background(), url)
-	if err != nil {
-		return nil, err
-	}
-	listener := &Listener{
-		conn:   conn,
-		Errors: make(chan error),
-	}
-	listener.Alerts = make(chan *Alert, 50)
-	if _, err := conn.Exec(context.Background(), "LISTEN alert"); err != nil {
-		return nil, err
-	}
-	go listener.listen()
-	return listener, nil
-}
-
 func (listener *Listener) listen() {
 	for {
 		notification, err := listener.conn.WaitForNotification(context.Background())
@@ -64,13 +46,6 @@ func (listener *Listener) listen() {
 			} else {
 				listener.Integrations <- integrationP
 			}
-		case "alert":
-			alertP := new(Alert)
-			if err := json.Unmarshal([]byte(notification.Payload), alertP); err != nil {
-				listener.Errors <- fmt.Errorf("%v | Payload: %v", err, notification.Payload)
-			} else {
-				listener.Alerts <- alertP
-			}
 		}
 	}
 }
diff --git a/backend/pkg/db/postgres/messages_common.go b/backend/pkg/db/postgres/messages-common.go
similarity index 100%
rename from backend/pkg/db/postgres/messages_common.go
rename to backend/pkg/db/postgres/messages-common.go
diff --git a/backend/pkg/db/postgres/messages_ios.go b/backend/pkg/db/postgres/messages-ios.go
similarity index 100%
rename from backend/pkg/db/postgres/messages_ios.go
rename to backend/pkg/db/postgres/messages-ios.go
diff --git a/backend/pkg/db/postgres/messages_web_stats.go b/backend/pkg/db/postgres/messages-web-stats.go
similarity index 97%
rename from backend/pkg/db/postgres/messages_web_stats.go
rename to backend/pkg/db/postgres/messages-web-stats.go
index 933442b0b..27a6272e2 100644
--- a/backend/pkg/db/postgres/messages_web_stats.go
+++ b/backend/pkg/db/postgres/messages-web-stats.go
@@ -35,7 +35,7 @@ func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrac
 }
 
 func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent) error {
-	host, _, err := url.GetURLParts(e.URL)
+	host, _, _, err := url.GetURLParts(e.URL)
 	if err != nil {
 		return err
 	}
diff --git a/backend/pkg/db/postgres/messages_web.go b/backend/pkg/db/postgres/messages-web.go
similarity index 88%
rename from backend/pkg/db/postgres/messages_web.go
rename to backend/pkg/db/postgres/messages-web.go
index 0d822cddd..197924fa9 100644
--- a/backend/pkg/db/postgres/messages_web.go
+++ b/backend/pkg/db/postgres/messages-web.go
@@ -55,7 +55,7 @@ func (conn *Conn) InsertWebUserAnonymousID(sessionID uint64, userAnonymousID *Us
 
 // TODO: fix column "dom_content_loaded_event_end" of relation "pages"
 func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
-	host, path, err := url.GetURLParts(e.URL)
+	host, path, query, err := url.GetURLParts(e.URL)
 	if err != nil {
 		return err
 	}
@@ -64,20 +64,25 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
 		return err
 	}
 	defer tx.rollback()
+	// base_path is deprecated
 	if err := tx.exec(`
 		INSERT INTO events.pages (
-			session_id, message_id, timestamp, referrer, base_referrer, host, path, base_path,
+			session_id, message_id, timestamp, referrer, base_referrer, host, path, query,
 			dom_content_loaded_time, load_time, response_end, first_paint_time, first_contentful_paint_time,
 			speed_index, visually_complete, time_to_interactive, response_time, dom_building_time
 		) VALUES (
-			$1, $2, $3, $4, $5, $6, $7, $8,
+			$1, $2, $3,
+			$4, $5,
+			$6, $7, $8,
 			NULLIF($9, 0), NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0),
 			NULLIF($14, 0), NULLIF($15, 0), NULLIF($16, 0), NULLIF($17, 0), NULLIF($18, 0)
 		)
 	`,
-		sessionID, e.MessageID, e.Timestamp, e.Referrer, url.DiscardURLQuery(e.Referrer), host, path, url.DiscardURLQuery(path),
+		sessionID, e.MessageID, e.Timestamp,
+		e.Referrer, url.DiscardURLQuery(e.Referrer),
+		host, path, query,
 		e.DomContentLoadedEventEnd, e.LoadEventEnd, e.ResponseEnd, e.FirstPaint, e.FirstContentfulPaint,
 		e.SpeedIndex, e.VisuallyComplete, e.TimeToInteractive, calcResponseTime(e), calcDomBuildingTime(e),
@@ -109,7 +114,7 @@ func (conn *Conn) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error {
 		INSERT INTO events.clicks
 			(session_id, message_id, timestamp, label, selector, url)
 		(SELECT
-			$1, $2, $3, NULLIF($4, ''), $5, host || base_path
+			$1, $2, $3, NULLIF($4, ''), $5, host || path
 		FROM events.pages
 		WHERE session_id = $1 AND timestamp <= $3 ORDER BY timestamp DESC LIMIT 1
 		)
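url.GetURLParts now returns host, path and query as separate values, and the pages table stores query in place of the deprecated base_path. A rough Python analogue of the split; the exact normalization done by the Go helper is not visible in this diff, so this is an approximation:

    from urllib.parse import urlsplit

    def get_url_parts(raw: str):
        parts = urlsplit(raw)
        return parts.netloc, parts.path, parts.query

    host, path, query = get_url_parts("https://app.example.com/sessions?limit=10")
    print(host, path, query)  # app.example.com /sessions limit=10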
@@ -210,20 +215,27 @@ func (conn *Conn) InsertWebFetchEvent(sessionID uint64, savePayload bool, e *Fet
 		request = &e.Request
 		response = &e.Response
 	}
-	conn.insertAutocompleteValue(sessionID, "REQUEST", url.DiscardURLQuery(e.URL))
+	host, path, query, err := url.GetURLParts(e.URL)
+	if err != nil {
+		return err
+	}
+	conn.insertAutocompleteValue(sessionID, "REQUEST", path)
 	return conn.batchQueue(sessionID, `
 		INSERT INTO events_common.requests (
-			session_id, timestamp,
-			seq_index, url, duration, success,
-			request_body, response_body, status_code, method
+			session_id, timestamp, seq_index,
+			url, host, path, query,
+			request_body, response_body, status_code, method,
+			duration, success
 		) VALUES (
-			$1, $2,
-			$3, $4, $5, $6,
-			$7, $8, $9::smallint, NULLIF($10, '')::http_method
+			$1, $2, $3,
+			$4, $5, $6, $7,
+			$8, $9, $10::smallint, NULLIF($11, '')::http_method,
+			$12, $13
 		) ON CONFLICT DO NOTHING`,
-		sessionID, e.Timestamp,
-		getSqIdx(e.MessageID), e.URL, e.Duration, e.Status < 400,
+		sessionID, e.Timestamp, getSqIdx(e.MessageID),
+		e.URL, host, path, query,
 		request, response, e.Status, url.EnsureMethod(e.Method),
+		e.Duration, e.Status < 400,
 	)
 }
diff --git a/backend/pkg/db/postgres/unstarted_session.go b/backend/pkg/db/postgres/unstarted-session.go
similarity index 100%
rename from backend/pkg/db/postgres/unstarted_session.go
rename to backend/pkg/db/postgres/unstarted-session.go
diff --git a/backend/pkg/env/worker_id.go b/backend/pkg/env/worker-id.go
similarity index 100%
rename from backend/pkg/env/worker_id.go
rename to backend/pkg/env/worker-id.go
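Storing host, path and query as separate columns in events_common.requests means request lookups no longer have to pattern-match the full url string. A hedged sketch of the kind of query this enables; the connection details and any supporting indexes are assumptions, while the column names come from the INSERT above:

    import psycopg2

    conn = psycopg2.connect("dbname=postgres user=postgres")  # hypothetical DSN
    with conn.cursor() as cur:
        cur.execute(
            """SELECT status_code, duration
               FROM events_common.requests
               WHERE session_id = %s AND path = %s""",
            (123, "/api/login"),
        )
        print(cur.fetchall())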
(Should return on EOF)") } -const AVG_MESSAGE_SIZE = 40 // TODO: calculate OR calculate dynamically +const AVG_MESSAGE_SIZE = 40 // TODO: calculate OR calculate dynamically func WriteBatch(mList []Message) []byte { - batch := make([]byte, AVG_MESSAGE_SIZE * len(mList)) + batch := make([]byte, AVG_MESSAGE_SIZE*len(mList)) p := 0 for _, msg := range mList { msgBytes := msg.Encode() - if len(batch) < p + len(msgBytes) { - newBatch := make([]byte, 2*len(batch) + len(msgBytes)) - copy(newBatch, batch) - batch = newBatch + if len(batch) < p+len(msgBytes) { + newBatch := make([]byte, 2*len(batch)+len(msgBytes)) + copy(newBatch, batch) + batch = newBatch } copy(batch[p:], msgBytes) p += len(msgBytes) @@ -70,12 +73,12 @@ func WriteBatch(mList []Message) []byte { return batch[:p] } -func RewriteBatch(b []byte, rewrite func(Message) Message) ([]byte, error) { - mList := make([]Message, 0, len(b)/AVG_MESSAGE_SIZE) - if err := ReadBatch(b, func(m Message) { +func RewriteBatch(reader io.Reader, rewrite func(Message) Message) ([]byte, error) { + mList := make([]Message, 0, 10) // 10? + if err := ReadBatchReader(reader, func(m Message) { mList = append(mList, rewrite(m)) }); err != nil { return nil, err } return WriteBatch(mList), nil -} \ No newline at end of file +} diff --git a/backend/pkg/messages/get_timestamp.go b/backend/pkg/messages/get-timestamp.go similarity index 100% rename from backend/pkg/messages/get_timestamp.go rename to backend/pkg/messages/get-timestamp.go diff --git a/backend/pkg/messages/legacy_message_transform.go b/backend/pkg/messages/legacy-message-transform.go similarity index 100% rename from backend/pkg/messages/legacy_message_transform.go rename to backend/pkg/messages/legacy-message-transform.go diff --git a/backend/pkg/messages/read_message.go b/backend/pkg/messages/read-message.go similarity index 100% rename from backend/pkg/messages/read_message.go rename to backend/pkg/messages/read-message.go diff --git a/backend/pkg/pprof/pprof.go b/backend/pkg/pprof/pprof.go new file mode 100644 index 000000000..a05080178 --- /dev/null +++ b/backend/pkg/pprof/pprof.go @@ -0,0 +1,13 @@ +package pprof + +import ( + "log" + "net/http" + _ "net/http/pprof" +) + +func StartProfilingServer() { + go func() { + log.Println(http.ListenAndServe("localhost:6060", nil)) + }() +} diff --git a/backend/pkg/queue/import.go b/backend/pkg/queue/import.go index 2bca9c8fd..623d301ca 100644 --- a/backend/pkg/queue/import.go +++ b/backend/pkg/queue/import.go @@ -1,15 +1,14 @@ package queue import ( - "openreplay/backend/pkg/redisstream" "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/redisstream" ) -func NewConsumer(group string, topics []string, handler types.MessageHandler) types.Consumer { +func NewConsumer(group string, topics []string, handler types.MessageHandler, _ bool) types.Consumer { return redisstream.NewConsumer(group, topics, handler) } func NewProducer() types.Producer { return redisstream.NewProducer() } - diff --git a/backend/pkg/queue/messages.go b/backend/pkg/queue/messages.go index eca4a4d49..0ab184ee6 100644 --- a/backend/pkg/queue/messages.go +++ b/backend/pkg/queue/messages.go @@ -7,13 +7,12 @@ import ( "openreplay/backend/pkg/queue/types" ) - -func NewMessageConsumer(group string, topics []string, handler types.DecodedMessageHandler) types.Consumer { +func NewMessageConsumer(group string, topics []string, handler types.DecodedMessageHandler, autoCommit bool) types.Consumer { return NewConsumer(group, topics, func(sessionID uint64, value []byte, meta *types.Meta) { if 
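A note on the batch refactor above: ReadBatch is now a thin wrapper over ReadBatchReader, so batches can be decoded from any io.Reader instead of a fully buffered []byte (this is what later lets the web handler feed the request body straight into RewriteBatch). A minimal sketch of a caller, assuming only the exported API shown in this hunk; the byte slice is a stand-in for a real encoded batch:

package main

import (
	"bytes"
	"fmt"

	"openreplay/backend/pkg/messages"
)

func main() {
	raw := []byte{} // stand-in for a real encoded batch
	n := 0
	// ReadBatchReader decodes message by message from the reader; nothing
	// beyond the current message has to be held in memory.
	if err := messages.ReadBatchReader(bytes.NewReader(raw), func(m messages.Message) {
		n++ // m.Meta().Index and m.Meta().Timestamp are already filled in here
	}); err != nil {
		fmt.Println("decode error:", err)
	}
	fmt.Println("decoded", n, "messages")
}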
err := messages.ReadBatch(value, func(msg messages.Message) { handler(sessionID, msg, meta) }); err != nil { log.Printf("Decode error: %v\n", err) } - }) + }, autoCommit) } diff --git a/backend/pkg/queue/types/types.go b/backend/pkg/queue/types/types.go index b671323d0..600babe25 100644 --- a/backend/pkg/queue/types/types.go +++ b/backend/pkg/queue/types/types.go @@ -6,26 +6,22 @@ import ( type Consumer interface { ConsumeNext() error - DisableAutoCommit() Commit() error CommitBack(gap int64) error Close() } - type Producer interface { Produce(topic string, key uint64, value []byte) error Close(timeout int) Flush(timeout int) } - type Meta struct { - ID uint64 - Topic string + ID uint64 + Topic string Timestamp int64 } type MessageHandler func(uint64, []byte, *Meta) type DecodedMessageHandler func(uint64, messages.Message, *Meta) - diff --git a/backend/pkg/redisstream/consumer.go b/backend/pkg/redisstream/consumer.go index 164ee9236..d32972981 100644 --- a/backend/pkg/redisstream/consumer.go +++ b/backend/pkg/redisstream/consumer.go @@ -1,24 +1,22 @@ package redisstream import ( + "log" "net" + "sort" "strconv" "strings" - "log" - "sort" "time" - "github.com/pkg/errors" _redis "github.com/go-redis/redis" + "github.com/pkg/errors" "openreplay/backend/pkg/queue/types" ) - - -type idsInfo struct{ - id []string - ts []int64 +type idsInfo struct { + id []string + ts []int64 } type streamPendingIDsMap map[string]*idsInfo @@ -41,26 +39,25 @@ func NewConsumer(group string, streams []string, messageHandler types.MessageHan } } - idsPending := make(streamPendingIDsMap) streamsCount := len(streams) for i := 0; i < streamsCount; i++ { - // ">" is for never-delivered messages. - // Otherwise - never acknoledged only + // ">" is for never-delivered messages. + // Otherwise - never acknoledged only // TODO: understand why in case of "0" it eats 100% cpu - streams = append(streams, ">") - + streams = append(streams, ">") + idsPending[streams[i]] = new(idsInfo) } return &Consumer{ - redis: redis, + redis: redis, messageHandler: messageHandler, - streams: streams, - group: group, - autoCommit: true, - idsPending: idsPending, + streams: streams, + group: group, + autoCommit: true, + idsPending: idsPending, } } @@ -106,9 +103,9 @@ func (c *Consumer) ConsumeNext() error { return errors.New("Too many messages per ms in redis") } c.messageHandler(sessionID, []byte(valueString), &types.Meta{ - Topic: r.Stream, + Topic: r.Stream, Timestamp: int64(ts), - ID: ts << 13 | (idx & 0x1FFF), // Max: 4096 messages/ms for 69 years + ID: ts<<13 | (idx & 0x1FFF), // Max: 4096 messages/ms for 69 years }) if c.autoCommit { if err = c.redis.XAck(r.Stream, c.group, m.ID).Err(); err != nil { @@ -119,7 +116,7 @@ func (c *Consumer) ConsumeNext() error { c.idsPending[r.Stream].id = append(c.idsPending[r.Stream].id, m.ID) c.idsPending[r.Stream].ts = append(c.idsPending[r.Stream].ts, int64(ts)) } - + } } return nil @@ -158,13 +155,9 @@ func (c *Consumer) CommitBack(gap int64) error { c.idsPending[stream].id = idsInfo.id[maxI:] c.idsPending[stream].ts = idsInfo.ts[maxI:] } - return nil -} - -func (c *Consumer) DisableAutoCommit() { - //c.autoCommit = false + return nil } func (c *Consumer) Close() { // noop -} \ No newline at end of file +} diff --git a/backend/pkg/token/tokenizer.go b/backend/pkg/token/tokenizer.go index 3f1069a63..f61e1f145 100644 --- a/backend/pkg/token/tokenizer.go +++ b/backend/pkg/token/tokenizer.go @@ -22,8 +22,8 @@ func NewTokenizer(secret string) *Tokenizer { } type TokenData struct { - ID uint64 - ExpTime 
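With DisableAutoCommit gone from the Consumer interface (see the types.go hunk above), the commit mode is now fixed at construction time through the new trailing bool on NewConsumer/NewMessageConsumer. A sketch of both modes; the group and topic names are made up for illustration:

package main

import (
	"openreplay/backend/pkg/messages"
	"openreplay/backend/pkg/queue"
	"openreplay/backend/pkg/queue/types"
)

func handle(sessionID uint64, msg messages.Message, meta *types.Meta) {
	// process msg ...
}

func main() {
	// Auto-commit: every message is acknowledged as soon as it is handled
	// (what the assets cacher does).
	auto := queue.NewMessageConsumer("cache-group", []string{"cache-topic"}, handle, true)

	// Manual commit: the worker acknowledges on its own schedule through
	// Commit/CommitBack, replacing the old consumer.DisableAutoCommit() call
	// (what the db and ender services do).
	manual := queue.NewMessageConsumer("db-group", []string{"raw-topic"}, handle, false)

	_, _ = auto, manual
}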
int64 + ID uint64 + ExpTime int64 } func (tokenizer *Tokenizer) sign(body string) []byte { @@ -33,7 +33,7 @@ func (tokenizer *Tokenizer) sign(body string) []byte { } func (tokenizer *Tokenizer) Compose(d TokenData) string { - body := strconv.FormatUint(d.ID, 36) + + body := strconv.FormatUint(d.ID, 36) + "." + strconv.FormatInt(d.ExpTime, 36) sign := base58.Encode(tokenizer.sign(body)) return body + "." + sign @@ -58,8 +58,8 @@ func (tokenizer *Tokenizer) Parse(token string) (*TokenData, error) { if err != nil { return nil, err } - if expTime <= time.Now().UnixNano()/1e6 { - return &TokenData{id,expTime}, EXPIRED + if expTime <= time.Now().UnixMilli() { + return &TokenData{id, expTime}, EXPIRED } - return &TokenData{id,expTime}, nil + return &TokenData{id, expTime}, nil } diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go index 1fe717531..b55921149 100644 --- a/backend/pkg/url/assets/url.go +++ b/backend/pkg/url/assets/url.go @@ -5,11 +5,18 @@ import ( "path/filepath" "strconv" "strings" + "time" + + "openreplay/backend/pkg/flakeid" ) func getSessionKey(sessionID uint64) string { - // Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID - return strconv.FormatUint(sessionID>>50, 10) + return strconv.FormatUint( + uint64(time.UnixMilli( + int64(flakeid.ExtractTimestamp(sessionID)), + ).Weekday()), + 10, + ) } func ResolveURL(baseurl string, rawurl string) string { diff --git a/backend/pkg/url/url.go b/backend/pkg/url/url.go index b9181774d..0ac0f9e08 100644 --- a/backend/pkg/url/url.go +++ b/backend/pkg/url/url.go @@ -1,18 +1,23 @@ package url import ( - "strings" _url "net/url" + "strings" ) func DiscardURLQuery(url string) string { return strings.Split(url, "?")[0] -} +} -func GetURLParts(rawURL string) (string, string, error) { +func GetURLParts(rawURL string) (string, string, string, error) { u, err := _url.Parse(rawURL) if err != nil { - return "", "", err + return "", "", "", err } - return u.Host, u.RequestURI(), nil -} \ No newline at end of file + // u.Scheme u.Fragment / RawFragment ? + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + return u.Host, path, u.RawQuery, nil +} diff --git a/backend/pkg/utime/utime.go b/backend/pkg/utime/utime.go deleted file mode 100644 index e3b5a2751..000000000 --- a/backend/pkg/utime/utime.go +++ /dev/null @@ -1,11 +0,0 @@ -package utime - -import "time" - -func CurrentTimestamp() int64 { - return time.Now().UnixNano() / 1e6 -} - -func ToMilliseconds(t time.Time) int64 { - return t.UnixNano() / 1e6 -} diff --git a/backend/services/assets/cacher/cacher.go b/backend/services/assets/cacher/cacher.go index 59b09449f..70ea31928 100644 --- a/backend/services/assets/cacher/cacher.go +++ b/backend/services/assets/cacher/cacher.go @@ -1,31 +1,31 @@ package cacher import ( + "crypto/tls" "fmt" "io" "io/ioutil" "mime" "net/http" - "crypto/tls" "path/filepath" "strings" "time" "github.com/pkg/errors" - - "openreplay/backend/pkg/url/assets" + "openreplay/backend/pkg/storage" + "openreplay/backend/pkg/url/assets" ) const MAX_CACHE_DEPTH = 5 type cacher struct { - timeoutMap *timeoutMap // Concurrency implemented - s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." - httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." - rewriter *assets.Rewriter // Read only - Errors chan error - sizeLimit int + timeoutMap *timeoutMap // Concurrency implemented + s3 *storage.S3 // AWS Docs: "These clients are safe to use concurrently." 
+ httpClient *http.Client // Docs: "Clients are safe for concurrent use by multiple goroutines." + rewriter *assets.Rewriter // Read only + Errors chan error + sizeLimit int } func NewCacher(region string, bucket string, origin string, sizeLimit int) *cacher { @@ -36,26 +36,26 @@ func NewCacher(region string, bucket string, origin string, sizeLimit int) *cach httpClient: &http.Client{ Timeout: time.Duration(6) * time.Second, Transport: &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - }, + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + }, }, - rewriter: rewriter, - Errors: make(chan error), - sizeLimit: sizeLimit, + rewriter: rewriter, + Errors: make(chan error), + sizeLimit: sizeLimit, } } func (c *cacher) cacheURL(requestURL string, sessionID uint64, depth byte, context string, isJS bool) { - if c.timeoutMap.contains(requestURL) { - return - } - c.timeoutMap.add(requestURL) var cachePath string - if (isJS) { + if isJS { cachePath = assets.GetCachePathForJS(requestURL) } else { cachePath = assets.GetCachePathForAssets(sessionID, requestURL) } + if c.timeoutMap.contains(cachePath) { + return + } + c.timeoutMap.add(cachePath) if c.s3.Exists(cachePath) { return } @@ -94,20 +94,19 @@ func (c *cacher) cacheURL(requestURL string, sessionID uint64, depth byte, conte if isCSS { strData = c.rewriter.RewriteCSS(sessionID, requestURL, strData) // TODO: one method for reqrite and return list } - - // TODO: implement in streams + + // TODO: implement in streams err = c.s3.Upload(strings.NewReader(strData), cachePath, contentType, false) if err != nil { c.Errors <- errors.Wrap(err, context) return } - c.timeoutMap.add(requestURL) if isCSS { if depth > 0 { for _, extractedURL := range assets.ExtractURLsFromCSS(string(data)) { - if fullURL, cachable := assets.GetFullCachableURL(requestURL, extractedURL); cachable { - go c.cacheURL(fullURL, sessionID, depth-1, context + "\n -> " + fullURL, false) + if fullURL, cachable := assets.GetFullCachableURL(requestURL, extractedURL); cachable { + go c.cacheURL(fullURL, sessionID, depth-1, context+"\n -> "+fullURL, false) } } if err != nil { diff --git a/backend/services/assets/cacher/timeoutMap.go b/backend/services/assets/cacher/timeoutMap.go index 36fc4ee4d..5a8e31424 100644 --- a/backend/services/assets/cacher/timeoutMap.go +++ b/backend/services/assets/cacher/timeoutMap.go @@ -5,30 +5,30 @@ import ( "time" ) -const MAX_STORAGE_TIME = 18 * time.Hour +const MAX_STORAGE_TIME = 24 * time.Hour // If problem with cache contention (>=4 core) look at sync.Map type timeoutMap struct { mx sync.RWMutex - m map[string]time.Time + m map[string]time.Time } func newTimeoutMap() *timeoutMap { return &timeoutMap{ m: make(map[string]time.Time), } -} +} func (tm *timeoutMap) add(key string) { tm.mx.Lock() - defer tm.mx.Unlock() + defer tm.mx.Unlock() tm.m[key] = time.Now() } func (tm *timeoutMap) contains(key string) bool { tm.mx.RLock() - defer tm.mx.RUnlock() + defer tm.mx.RUnlock() _, ok := tm.m[key] return ok } @@ -36,7 +36,7 @@ func (tm *timeoutMap) contains(key string) bool { func (tm *timeoutMap) deleteOutdated() { now := time.Now() tm.mx.Lock() - defer tm.mx.Unlock() + defer tm.mx.Unlock() for key, t := range tm.m { if now.Sub(t) > MAX_STORAGE_TIME { delete(tm.m, key) diff --git a/backend/services/assets/main.go b/backend/services/assets/main.go index 450dfc83c..664dc5b09 100644 --- a/backend/services/assets/main.go +++ b/backend/services/assets/main.go @@ -18,7 +18,7 @@ import ( func main() { log.SetFlags(log.LstdFlags | log.LUTC | 
log.Llongfile) - GROUP_CACHE := env.String("GROUP_CACHE") + GROUP_CACHE := env.String("GROUP_CACHE") TOPIC_CACHE := env.String("TOPIC_CACHE") cacher := cacher.NewCacher( @@ -29,10 +29,10 @@ func main() { ) consumer := queue.NewMessageConsumer( - GROUP_CACHE, - []string{ TOPIC_CACHE }, + GROUP_CACHE, + []string{TOPIC_CACHE}, func(sessionID uint64, message messages.Message, e *types.Meta) { - switch msg := message.(type) { + switch msg := message.(type) { case *messages.AssetCache: cacher.CacheURL(sessionID, msg.URL) case *messages.ErrorEvent: @@ -47,17 +47,17 @@ func main() { for _, source := range sourceList { cacher.CacheJSFile(source) } - } + } }, + true, ) - tick := time.Tick(20 * time.Minute) sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) + signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) - log.Printf("Cacher service started\n") + log.Printf("Cacher service started\n") for { select { case sig := <-sigchan: @@ -74,4 +74,4 @@ func main() { } } } -} \ No newline at end of file +} diff --git a/backend/services/db/main.go b/backend/services/db/main.go index d6190a4f0..2ad6e4aa8 100644 --- a/backend/services/db/main.go +++ b/backend/services/db/main.go @@ -74,8 +74,8 @@ func main() { } }) }, + false, ) - consumer.DisableAutoCommit() sigchan := make(chan os.Signal, 1) signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) diff --git a/backend/services/ender/builder/builder.go b/backend/services/ender/builder/builder.go index e36bdcbe3..1a89f67b6 100644 --- a/backend/services/ender/builder/builder.go +++ b/backend/services/ender/builder/builder.go @@ -110,11 +110,11 @@ func (b *builder) buildInputEvent() { func (b *builder) handleMessage(message Message, messageID uint64) { timestamp := GetTimestamp(message) - if b.timestamp <= timestamp { // unnecessary? TODO: test and remove + if b.timestamp < timestamp { // unnecessary? TODO: test and remove b.timestamp = timestamp } - b.lastProcessedTimestamp = time.Now().UnixNano() / 1e6 + b.lastProcessedTimestamp = time.Now().UnixMilli() // Might happen before the first timestamp. 
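With pkg/utime deleted earlier in this diff, call sites switch to the millisecond helpers the standard library has provided since Go 1.17; the conversion is mechanical, as this self-contained check shows:

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()
	// Old style, as in the removed utime helpers:
	legacy := now.UnixNano() / 1e6
	// New style, used throughout this diff (Go 1.17+):
	fmt.Println(legacy == now.UnixMilli()) // true
	// time.UnixMilli reverses it, replacing time.Unix(0, ts*1e6):
	fmt.Println(time.UnixMilli(legacy).Format(time.RFC3339))
}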
switch msg := message.(type) { @@ -218,14 +218,16 @@ func (b *builder) handleMessage(message Message, messageID uint64) { Type: tp, Success: success, }) - if !success && tp == "fetch" { + if !success { + issueType := "missing_resource" + if tp == "fetch" { + issueType = "bad_request" + } b.appendReadyMessage(&IssueEvent{ - Type: "bad_request", + Type: issueType, MessageID: messageID, Timestamp: msg.Timestamp, ContextString: msg.URL, - Context: "", - Payload: "", }) } case *RawCustomEvent: @@ -254,6 +256,14 @@ func (b *builder) handleMessage(message Message, messageID uint64) { Status: msg.Status, Duration: msg.Duration, }) + if msg.Status >= 400 { + b.appendReadyMessage(&IssueEvent{ + Type: "bad_request", + MessageID: messageID, + Timestamp: msg.Timestamp, + ContextString: msg.URL, + }) + } case *GraphQL: b.appendReadyMessage(&GraphQLEvent{ MessageID: messageID, diff --git a/backend/services/ender/main.go b/backend/services/ender/main.go index e8d739f0e..f2430f3a0 100644 --- a/backend/services/ender/main.go +++ b/backend/services/ender/main.go @@ -8,12 +8,12 @@ import ( "os/signal" "syscall" - "openreplay/backend/pkg/intervals" "openreplay/backend/pkg/env" + "openreplay/backend/pkg/intervals" + logger "openreplay/backend/pkg/log" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" - logger "openreplay/backend/pkg/log" "openreplay/backend/services/ender/builder" ) @@ -29,24 +29,24 @@ func main() { producer := queue.NewProducer() consumer := queue.NewMessageConsumer( - GROUP_EVENTS, - []string{ + GROUP_EVENTS, + []string{ env.String("TOPIC_RAW_WEB"), env.String("TOPIC_RAW_IOS"), - }, + }, func(sessionID uint64, msg messages.Message, meta *types.Meta) { statsLogger.HandleAndLog(sessionID, meta) builderMap.HandleMessage(sessionID, msg, msg.Meta().Index) }, + false, ) - consumer.DisableAutoCommit() - + tick := time.Tick(intervals.EVENTS_COMMIT_INTERVAL * time.Millisecond) sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) + signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) - log.Printf("Ender service started\n") + log.Printf("Ender service started\n") for { select { case sig := <-sigchan: @@ -55,8 +55,8 @@ func main() { consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP) consumer.Close() os.Exit(0) - case <- tick: - builderMap.IterateReadyMessages(time.Now().UnixNano()/1e6, func(sessionID uint64, readyMsg messages.Message) { + case <-tick: + builderMap.IterateReadyMessages(time.Now().UnixMilli(), func(sessionID uint64, readyMsg messages.Message) { producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(readyMsg)) }) // TODO: why exactly do we need Flush here and not in any other place? 
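The two builder.go hunks above widen the auto-raised issues: a failed resource load now yields missing_resource unless it came from fetch (bad_request), and a fetch that completes with status >= 400 also raises bad_request. Condensed into one hypothetical helper (the real code emits these inline from separate message cases):

package main

import "fmt"

// issueTypeFor condenses the mapping introduced in builder.go; it is an
// illustrative extraction, not a function that exists in the diff.
func issueTypeFor(resourceType string, success bool, status uint64) (string, bool) {
	if !success {
		if resourceType == "fetch" {
			return "bad_request", true
		}
		return "missing_resource", true
	}
	if status >= 400 {
		return "bad_request", true
	}
	return "", false
}

func main() {
	fmt.Println(issueTypeFor("fetch", false, 0))  // bad_request true
	fmt.Println(issueTypeFor("img", false, 0))    // missing_resource true
	fmt.Println(issueTypeFor("fetch", true, 502)) // bad_request true
}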
@@ -69,4 +69,3 @@ func main() { } } } - diff --git a/backend/services/http/handlers_depricated.go b/backend/services/http/handlers-depricated.go similarity index 100% rename from backend/services/http/handlers_depricated.go rename to backend/services/http/handlers-depricated.go diff --git a/backend/services/http/handlers_ios.go b/backend/services/http/handlers-ios.go similarity index 68% rename from backend/services/http/handlers_ios.go rename to backend/services/http/handlers-ios.go index 6c3f945bd..8116980e1 100644 --- a/backend/services/http/handlers_ios.go +++ b/backend/services/http/handlers-ios.go @@ -2,55 +2,55 @@ package main import ( "encoding/json" - "net/http" "errors" - "time" - "math/rand" - "strconv" "log" + "math/rand" + "net/http" + "strconv" + "time" "openreplay/backend/pkg/db/postgres" - "openreplay/backend/pkg/token" . "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/token" ) -const FILES_SIZE_LIMIT int64 = 1e7 // 10Mb +const FILES_SIZE_LIMIT int64 = 1e7 // 10Mb func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { type request struct { - Token string `json:"token"` - ProjectKey *string `json:"projectKey"` - TrackerVersion string `json:"trackerVersion"` - RevID string `json:"revID"` - UserUUID *string `json:"userUUID"` + Token string `json:"token"` + ProjectKey *string `json:"projectKey"` + TrackerVersion string `json:"trackerVersion"` + RevID string `json:"revID"` + UserUUID *string `json:"userUUID"` //UserOS string `json"userOS"` //hardcoded 'MacOS' - UserOSVersion string `json:"userOSVersion"` - UserDevice string `json:"userDevice"` - Timestamp uint64 `json:"timestamp"` + UserOSVersion string `json:"userOSVersion"` + UserDevice string `json:"userDevice"` + Timestamp uint64 `json:"timestamp"` // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac // “performances”:{ - // “activeProcessorCount”:8, - // “isLowPowerModeEnabled”:0, - // “orientation”:0, - // “systemUptime”:585430, - // “batteryState”:0, - // “thermalState”:0, - // “batteryLevel”:0, - // “processorCount”:8, - // “physicalMemory”:17179869184 - // }, + // “activeProcessorCount”:8, + // “isLowPowerModeEnabled”:0, + // “orientation”:0, + // “systemUptime”:585430, + // “batteryState”:0, + // “thermalState”:0, + // “batteryLevel”:0, + // “processorCount”:8, + // “physicalMemory”:17179869184 + // }, } type response struct { - Token string `json:"token"` - ImagesHashList []string `json:"imagesHashList"` - UserUUID string `json:"userUUID"` - BeaconSizeLimit int64 `json:"beaconSizeLimit"` - SessionID string `json:"sessionID"` + Token string `json:"token"` + ImagesHashList []string `json:"imagesHashList"` + UserUUID string `json:"userUUID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` + SessionID string `json:"sessionID"` } startTime := time.Now() req := &request{} body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) - //defer body.Close() + defer body.Close() if err := json.NewDecoder(body).Decode(req); err != nil { responseWithError(w, http.StatusBadRequest, err) return @@ -85,29 +85,29 @@ func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { responseWithError(w, http.StatusForbidden, errors.New("browser not recognized")) return } - sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6)) + sessionID, err := flaker.Compose(uint64(startTime.UnixMilli())) if err != nil { responseWithError(w, http.StatusInternalServerError, err) return } // TODO: if EXPIRED => send message for two sessions association expTime := 
startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond) - tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6} + tokenData = &token.TokenData{sessionID, expTime.UnixMilli()} country := geoIP.ExtractISOCodeFromHTTPRequest(r) // The difference with web is mostly here: producer.Produce(TOPIC_RAW_IOS, tokenData.ID, Encode(&IOSSessionStart{ - Timestamp: req.Timestamp, - ProjectID: uint64(p.ProjectID), - TrackerVersion: req.TrackerVersion, - RevID: req.RevID, - UserUUID: userUUID, - UserOS: "IOS", - UserOSVersion: req.UserOSVersion, - UserDevice: MapIOSDevice(req.UserDevice), - UserDeviceType: GetIOSDeviceType(req.UserDevice), - UserCountry: country, + Timestamp: req.Timestamp, + ProjectID: uint64(p.ProjectID), + TrackerVersion: req.TrackerVersion, + RevID: req.RevID, + UserUUID: userUUID, + UserOS: "IOS", + UserOSVersion: req.UserOSVersion, + UserDevice: MapIOSDevice(req.UserDevice), + UserDeviceType: GetIOSDeviceType(req.UserDevice), + UserCountry: country, })) } @@ -119,14 +119,13 @@ func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) { responseWithJSON(w, &response{ // ImagesHashList: imagesHashList, - Token: tokenizer.Compose(*tokenData), - UserUUID: userUUID, - SessionID: strconv.FormatUint(tokenData.ID, 10), + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), BeaconSizeLimit: BEACON_SIZE_LIMIT, }) } - func pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { sessionData, err := tokenizer.ParseFromHTTPRequest(r) if err != nil { @@ -136,8 +135,6 @@ func pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { pushMessages(w, r, sessionData.ID, TOPIC_RAW_IOS) } - - func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { sessionData, err := tokenizer.ParseFromHTTPRequest(r) if err != nil && err != token.EXPIRED { @@ -145,10 +142,9 @@ func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) { return } // Check timestamps here? 
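Since TokenData.ExpTime is now produced and checked in milliseconds via UnixMilli (see the tokenizer.go hunk earlier), a compose/parse round trip looks as follows; the secret and ID are placeholders:

package main

import (
	"fmt"
	"time"

	"openreplay/backend/pkg/token"
)

func main() {
	tk := token.NewTokenizer("placeholder-secret")
	data := token.TokenData{ID: 42, ExpTime: time.Now().Add(time.Hour).UnixMilli()}
	s := tk.Compose(data)
	parsed, err := tk.Parse(s)
	// err is nil while ExpTime lies in the future, and token.EXPIRED afterwards.
	fmt.Println(parsed.ID, err)
}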
- pushMessages(w, r, sessionData.ID,TOPIC_RAW_IOS) + pushMessages(w, r, sessionData.ID, TOPIC_RAW_IOS) } - func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { log.Printf("recieved imagerequest") @@ -159,16 +155,16 @@ func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { } r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT) - // defer r.Body.Close() + defer r.Body.Close() err = r.ParseMultipartForm(1e6) // ~1Mb if err == http.ErrNotMultipart || err == http.ErrMissingBoundary { responseWithError(w, http.StatusUnsupportedMediaType, err) - // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB + // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB } else if err != nil { responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging } - if (r.MultipartForm == nil) { + if r.MultipartForm == nil { responseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed")) } @@ -177,7 +173,7 @@ func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { return } - prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/" + prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/" for _, fileHeaderList := range r.MultipartForm.File { for _, fileHeader := range fileHeaderList { @@ -187,7 +183,7 @@ func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) { } key := prefix + fileHeader.Filename log.Printf("Uploading image... %v", key) - go func() { //TODO: mime type from header + go func() { //TODO: mime type from header if err := s3.Upload(file, key, "image/jpeg", false); err != nil { log.Printf("Upload ios screen error. %v", err) } diff --git a/backend/services/http/handlers_web.go b/backend/services/http/handlers-web.go similarity index 89% rename from backend/services/http/handlers_web.go rename to backend/services/http/handlers-web.go index 09d2511d8..7aab5bfbc 100644 --- a/backend/services/http/handlers_web.go +++ b/backend/services/http/handlers-web.go @@ -3,7 +3,6 @@ package main import ( "encoding/json" "errors" - "io/ioutil" "log" "math/rand" "net/http" @@ -11,8 +10,8 @@ import ( "time" "openreplay/backend/pkg/db/postgres" - "openreplay/backend/pkg/token" . "openreplay/backend/pkg/messages" + "openreplay/backend/pkg/token" ) func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { @@ -30,18 +29,18 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { UserID string `json:"userID"` } type response struct { - Timestamp int64 `json:"timestamp"` - Delay int64 `json:"delay"` - Token string `json:"token"` - UserUUID string `json:"userUUID"` - SessionID string `json:"sessionID"` - BeaconSizeLimit int64 `json:"beaconSizeLimit"` + Timestamp int64 `json:"timestamp"` + Delay int64 `json:"delay"` + Token string `json:"token"` + UserUUID string `json:"userUUID"` + SessionID string `json:"sessionID"` + BeaconSizeLimit int64 `json:"beaconSizeLimit"` } startTime := time.Now() req := &request{} body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error? 
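A recurring fix across these handler hunks: the commented-out Close calls return as defer body.Close() immediately after http.MaxBytesReader. The pattern, sketched with a hypothetical route and an illustrative cap:

package main

import (
	"encoding/json"
	"net/http"
)

func exampleHandler(w http.ResponseWriter, r *http.Request) {
	// MaxBytesReader caps how much can be read, and its Close closes the
	// wrapped body, so the defer guarantees cleanup on every early return.
	body := http.MaxBytesReader(w, r.Body, 1e3) // illustrative 1 KB cap
	defer body.Close()

	var req struct {
		Token string `json:"token"`
	}
	if err := json.NewDecoder(body).Decode(&req); err != nil {
		w.WriteHeader(http.StatusBadRequest)
		return
	}
	// ... handle req ...
}

func main() {
	http.HandleFunc("/v1/example", exampleHandler) // hypothetical route
	_ = http.ListenAndServe("localhost:8080", nil)
}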
- //defer body.Close() + defer body.Close() if err := json.NewDecoder(body).Decode(req); err != nil { responseWithError(w, http.StatusBadRequest, err) return @@ -102,7 +101,7 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { UserCountry: country, UserDeviceMemorySize: req.DeviceMemory, UserDeviceHeapSize: req.JsHeapSizeLimit, - UserID: req.UserID, + UserID: req.UserID, })) } @@ -110,9 +109,9 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) { responseWithJSON(w, &response{ //Timestamp: startTime.UnixNano() / 1e6, //Delay: delayDuration.Nanoseconds() / 1e6, - Token: tokenizer.Compose(*tokenData), - UserUUID: userUUID, - SessionID: strconv.FormatUint(tokenData.ID, 10), + Token: tokenizer.Compose(*tokenData), + UserUUID: userUUID, + SessionID: strconv.FormatUint(tokenData.ID, 10), BeaconSizeLimit: BEACON_SIZE_LIMIT, }) } @@ -124,18 +123,9 @@ func pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) { return } body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) - //defer body.Close() - buf, err := ioutil.ReadAll(body) - if err != nil { - responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging - return - } - //log.Printf("Sending batch...") - //startTime := time.Now() + defer body.Close() - // analyticsMessages := make([]Message, 0, 200) - - rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message { + rewritenBuf, err := RewriteBatch(body, func(msg Message) Message { switch m := msg.(type) { case *SetNodeAttributeURLBased: if m.Name == "src" || m.Name == "href" { @@ -248,4 +238,4 @@ func notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) { log.Printf("Unable to insert Unstarted Session: %v\n", err) } w.WriteHeader(http.StatusOK) -} \ No newline at end of file +} diff --git a/backend/services/http/handlers.go b/backend/services/http/handlers.go index e45e84e64..dd73925af 100644 --- a/backend/services/http/handlers.go +++ b/backend/services/http/handlers.go @@ -9,11 +9,11 @@ import ( gzip "github.com/klauspost/pgzip" ) -const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb +const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) { body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT) - //defer body.Close() + defer body.Close() var reader io.ReadCloser var err error switch r.Header.Get("Content-Encoding") { diff --git a/backend/services/http/ios-device.go b/backend/services/http/ios-device.go new file mode 100644 index 000000000..bec1f3b36 --- /dev/null +++ b/backend/services/http/ios-device.go @@ -0,0 +1,138 @@ +package main + +import ( + "strings" +) + +func MapIOSDevice(identifier string) string { + switch identifier { + case "iPod5,1": + return "iPod touch (5th generation)" + case "iPod7,1": + return "iPod touch (6th generation)" + case "iPod9,1": + return "iPod touch (7th generation)" + case "iPhone3,1", "iPhone3,2", "iPhone3,3": + return "iPhone 4" + case "iPhone4,1": + return "iPhone 4s" + case "iPhone5,1", "iPhone5,2": + return "iPhone 5" + case "iPhone5,3", "iPhone5,4": + return "iPhone 5c" + case "iPhone6,1", "iPhone6,2": + return "iPhone 5s" + case "iPhone7,2": + return "iPhone 6" + case "iPhone7,1": + return "iPhone 6 Plus" + case "iPhone8,1": + return "iPhone 6s" + case "iPhone8,2": + return "iPhone 6s Plus" + case "iPhone8,4": + return "iPhone SE" + case "iPhone9,1", "iPhone9,3": + return "iPhone 7" + case "iPhone9,2", "iPhone9,4": + return "iPhone 7 Plus" + case "iPhone10,1", "iPhone10,4": + 
return "iPhone 8" + case "iPhone10,2", "iPhone10,5": + return "iPhone 8 Plus" + case "iPhone10,3", "iPhone10,6": + return "iPhone X" + case "iPhone11,2": + return "iPhone XS" + case "iPhone11,4", "iPhone11,6": + return "iPhone XS Max" + case "iPhone11,8": + return "iPhone XR" + case "iPhone12,1": + return "iPhone 11" + case "iPhone12,3": + return "iPhone 11 Pro" + case "iPhone12,5": + return "iPhone 11 Pro Max" + case "iPhone12,8": + return "iPhone SE (2nd generation)" + case "iPhone13,1": + return "iPhone 12 mini" + case "iPhone13,2": + return "iPhone 12" + case "iPhone13,3": + return "iPhone 12 Pro" + case "iPhone13,4": + return "iPhone 12 Pro Max" + case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4": + return "iPad 2" + case "iPad3,1", "iPad3,2", "iPad3,3": + return "iPad (3rd generation)" + case "iPad3,4", "iPad3,5", "iPad3,6": + return "iPad (4th generation)" + case "iPad6,11", "iPad6,12": + return "iPad (5th generation)" + case "iPad7,5", "iPad7,6": + return "iPad (6th generation)" + case "iPad7,11", "iPad7,12": + return "iPad (7th generation)" + case "iPad11,6", "iPad11,7": + return "iPad (8th generation)" + case "iPad4,1", "iPad4,2", "iPad4,3": + return "iPad Air" + case "iPad5,3", "iPad5,4": + return "iPad Air 2" + case "iPad11,3", "iPad11,4": + return "iPad Air (3rd generation)" + case "iPad13,1", "iPad13,2": + return "iPad Air (4th generation)" + case "iPad2,5", "iPad2,6", "iPad2,7": + return "iPad mini" + case "iPad4,4", "iPad4,5", "iPad4,6": + return "iPad mini 2" + case "iPad4,7", "iPad4,8", "iPad4,9": + return "iPad mini 3" + case "iPad5,1", "iPad5,2": + return "iPad mini 4" + case "iPad11,1", "iPad11,2": + return "iPad mini (5th generation)" + case "iPad6,3", "iPad6,4": + return "iPad Pro (9.7-inch)" + case "iPad7,3", "iPad7,4": + return "iPad Pro (10.5-inch)" + case "iPad8,1", "iPad8,2", "iPad8,3", "iPad8,4": + return "iPad Pro (11-inch) (1st generation)" + case "iPad8,9", "iPad8,10": + return "iPad Pro (11-inch) (2nd generation)" + case "iPad6,7", "iPad6,8": + return "iPad Pro (12.9-inch) (1st generation)" + case "iPad7,1", "iPad7,2": + return "iPad Pro (12.9-inch) (2nd generation)" + case "iPad8,5", "iPad8,6", "iPad8,7", "iPad8,8": + return "iPad Pro (12.9-inch) (3rd generation)" + case "iPad8,11", "iPad8,12": + return "iPad Pro (12.9-inch) (4th generation)" + case "AppleTV5,3": + return "Apple TV" + case "AppleTV6,2": + return "Apple TV 4K" + case "AudioAccessory1,1": + return "HomePod" + case "AudioAccessory5,1": + return "HomePod mini" + case "i386", "x86_64": + return "Simulator" + default: + return identifier + } +} + +func GetIOSDeviceType(identifier string) string { + if strings.Contains(identifier, "iPhone") { + return "mobile" //"phone" + } + if strings.Contains(identifier, "iPad") { + return "tablet" + } + return "other" +} diff --git a/backend/services/http/ios_device.go b/backend/services/http/ios_device.go deleted file mode 100644 index 2c3474157..000000000 --- a/backend/services/http/ios_device.go +++ /dev/null @@ -1,79 +0,0 @@ -package main - -import ( - "strings" -) - -func MapIOSDevice(identifier string) string { - switch identifier { - case "iPod5,1": return "iPod touch (5th generation)" - case "iPod7,1": return "iPod touch (6th generation)" - case "iPod9,1": return "iPod touch (7th generation)" - case "iPhone3,1", "iPhone3,2", "iPhone3,3": return "iPhone 4" - case "iPhone4,1": return "iPhone 4s" - case "iPhone5,1", "iPhone5,2": return "iPhone 5" - case "iPhone5,3", "iPhone5,4": return "iPhone 5c" - case "iPhone6,1", "iPhone6,2": return "iPhone 5s" - case 
"iPhone7,2": return "iPhone 6" - case "iPhone7,1": return "iPhone 6 Plus" - case "iPhone8,1": return "iPhone 6s" - case "iPhone8,2": return "iPhone 6s Plus" - case "iPhone8,4": return "iPhone SE" - case "iPhone9,1", "iPhone9,3": return "iPhone 7" - case "iPhone9,2", "iPhone9,4": return "iPhone 7 Plus" - case "iPhone10,1", "iPhone10,4": return "iPhone 8" - case "iPhone10,2", "iPhone10,5": return "iPhone 8 Plus" - case "iPhone10,3", "iPhone10,6": return "iPhone X" - case "iPhone11,2": return "iPhone XS" - case "iPhone11,4", "iPhone11,6": return "iPhone XS Max" - case "iPhone11,8": return "iPhone XR" - case "iPhone12,1": return "iPhone 11" - case "iPhone12,3": return "iPhone 11 Pro" - case "iPhone12,5": return "iPhone 11 Pro Max" - case "iPhone12,8": return "iPhone SE (2nd generation)" - case "iPhone13,1": return "iPhone 12 mini" - case "iPhone13,2": return "iPhone 12" - case "iPhone13,3": return "iPhone 12 Pro" - case "iPhone13,4": return "iPhone 12 Pro Max" - case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4":return "iPad 2" - case "iPad3,1", "iPad3,2", "iPad3,3": return "iPad (3rd generation)" - case "iPad3,4", "iPad3,5", "iPad3,6": return "iPad (4th generation)" - case "iPad6,11", "iPad6,12": return "iPad (5th generation)" - case "iPad7,5", "iPad7,6": return "iPad (6th generation)" - case "iPad7,11", "iPad7,12": return "iPad (7th generation)" - case "iPad11,6", "iPad11,7": return "iPad (8th generation)" - case "iPad4,1", "iPad4,2", "iPad4,3": return "iPad Air" - case "iPad5,3", "iPad5,4": return "iPad Air 2" - case "iPad11,3", "iPad11,4": return "iPad Air (3rd generation)" - case "iPad13,1", "iPad13,2": return "iPad Air (4th generation)" - case "iPad2,5", "iPad2,6", "iPad2,7": return "iPad mini" - case "iPad4,4", "iPad4,5", "iPad4,6": return "iPad mini 2" - case "iPad4,7", "iPad4,8", "iPad4,9": return "iPad mini 3" - case "iPad5,1", "iPad5,2": return "iPad mini 4" - case "iPad11,1", "iPad11,2": return "iPad mini (5th generation)" - case "iPad6,3", "iPad6,4": return "iPad Pro (9.7-inch)" - case "iPad7,3", "iPad7,4": return "iPad Pro (10.5-inch)" - case "iPad8,1", "iPad8,2", "iPad8,3", "iPad8,4":return "iPad Pro (11-inch) (1st generation)" - case "iPad8,9", "iPad8,10": return "iPad Pro (11-inch) (2nd generation)" - case "iPad6,7", "iPad6,8": return "iPad Pro (12.9-inch) (1st generation)" - case "iPad7,1", "iPad7,2": return "iPad Pro (12.9-inch) (2nd generation)" - case "iPad8,5", "iPad8,6", "iPad8,7", "iPad8,8":return "iPad Pro (12.9-inch) (3rd generation)" - case "iPad8,11", "iPad8,12": return "iPad Pro (12.9-inch) (4th generation)" - case "AppleTV5,3": return "Apple TV" - case "AppleTV6,2": return "Apple TV 4K" - case "AudioAccessory1,1": return "HomePod" - case "AudioAccessory5,1": return "HomePod mini" - case "i386", "x86_64": return "Simulator" - default: return identifier - } -} - -func GetIOSDeviceType(identifier string) string { - if strings.Contains(identifier, "iPhone") { - return "mobile" //"phone" - } - if strings.Contains(identifier, "iPad") { - return "tablet" - } - return "other" -} \ No newline at end of file diff --git a/backend/services/http/main.go b/backend/services/http/main.go index 8ed8b6d95..1f3bc93b3 100644 --- a/backend/services/http/main.go +++ b/backend/services/http/main.go @@ -10,19 +10,19 @@ import ( "golang.org/x/net/http2" - + "openreplay/backend/pkg/db/cache" + "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/env" "openreplay/backend/pkg/flakeid" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" "openreplay/backend/pkg/storage" - 
"openreplay/backend/pkg/db/postgres" - "openreplay/backend/pkg/db/cache" - "openreplay/backend/pkg/url/assets" "openreplay/backend/pkg/token" + "openreplay/backend/pkg/url/assets" "openreplay/backend/services/http/geoip" "openreplay/backend/services/http/uaparser" + "openreplay/backend/pkg/pprof" ) var rewriter *assets.Rewriter @@ -38,12 +38,14 @@ var TOPIC_RAW_WEB string var TOPIC_RAW_IOS string var TOPIC_CACHE string var TOPIC_TRIGGER string + //var TOPIC_ANALYTICS string var CACHE_ASSESTS bool var BEACON_SIZE_LIMIT int64 func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) + pprof.StartProfilingServer() producer = queue.NewProducer() defer producer.Close(15000) @@ -53,7 +55,7 @@ func main() { TOPIC_TRIGGER = env.String("TOPIC_TRIGGER") //TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS") rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN")) - pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20) + pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000*60*20) defer pgconn.Close() s3 = storage.NewS3(env.String("AWS_REGION"), env.String("S3_BUCKET_IOS_IMAGES")) tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET")) @@ -70,7 +72,7 @@ func main() { Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // TODO: agree with specification - w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Origin", "*") w.Header().Set("Access-Control-Allow-Methods", "POST") w.Header().Set("Access-Control-Allow-Headers", "Content-Type,Authorization") if r.Method == http.MethodOptions { @@ -79,13 +81,12 @@ func main() { return } - log.Printf("Request: %v - %v ", r.Method, r.URL.Path) - + log.Printf("Request: %v - %v ", r.Method, r.URL.Path) switch r.URL.Path { case "/": w.WriteHeader(http.StatusOK) - case "/v1/web/not-started": + case "/v1/web/not-started": switch r.Method { case http.MethodPost: notStartedHandlerWeb(w, r) diff --git a/backend/services/http/project_id.go b/backend/services/http/project_id.go deleted file mode 100644 index 059576fe8..000000000 --- a/backend/services/http/project_id.go +++ /dev/null @@ -1,12 +0,0 @@ -package main - -func decodeProjectID(projectID uint64) uint64 { - if projectID < 0x10000000000000 || projectID >= 0x20000000000000 { - return 0 - } - projectID = (projectID - 0x10000000000000) * 4212451012670231 & 0xfffffffffffff - if projectID > 0xffffffff { - return 0 - } - return projectID -} diff --git a/backend/services/integrations/integration/bugsnag.go b/backend/services/integrations/integration/bugsnag.go index 7c31db3cb..118cdb84d 100644 --- a/backend/services/integrations/integration/bugsnag.go +++ b/backend/services/integrations/integration/bugsnag.go @@ -1,15 +1,14 @@ package integration import ( + "encoding/json" "fmt" + "io" + "io/ioutil" "net/http" - "encoding/json" "net/url" "time" - "io" - "io/ioutil" - "openreplay/backend/pkg/utime" "openreplay/backend/pkg/messages" ) @@ -18,15 +17,14 @@ import ( */ type bugsnag struct { - BugsnagProjectId string // `json:"bugsnag_project_id"` + BugsnagProjectId string // `json:"bugsnag_project_id"` AuthorizationToken string // `json:"auth_token"` } - type bugsnagEvent struct { MetaData struct { SpecialInfo struct { - AsayerSessionId uint64 `json:"asayerSessionId,string"` + AsayerSessionId uint64 `json:"asayerSessionId,string"` OpenReplaySessionToken string `json:"openReplaySessionToken"` } `json:"special_info"` } `json:"metaData"` @@ -38,7 +36,7 @@ type bugsnagEvent struct { func (b *bugsnag) 
Request(c *client) error { sinceTs := c.getLastMessageTimestamp() + 1000 // From next second - sinceFormatted := time.Unix(0, int64(sinceTs*1e6)).Format(time.RFC3339) + sinceFormatted := time.UnixMilli(int64(sinceTs)).Format(time.RFC3339) requestURL := fmt.Sprintf("https://api.bugsnag.com/projects/%v/events", b.BugsnagProjectId) req, err := http.NewRequest("GET", requestURL, nil) if err != nil { @@ -47,10 +45,10 @@ func (b *bugsnag) Request(c *client) error { q := req.URL.Query() // q.Add("per_page", "100") // Up to a maximum of 30. Default: 30 // q.Add("sort", "timestamp") // Default: timestamp (timestamp == ReceivedAt ??) - q.Add("direction", "asc") // Default: desc + q.Add("direction", "asc") // Default: desc q.Add("full_reports", "true") // Default: false - q.Add("filters[event.since][][type]", "eq") - q.Add("filters[event.since][][value]", sinceFormatted) // seems like inclusively + q.Add("filters[event.since][][type]", "eq") + q.Add("filters[event.since][][value]", sinceFormatted) // seems like inclusively req.URL.RawQuery = q.Encode() authToken := "token " + b.AuthorizationToken @@ -85,7 +83,7 @@ func (b *bugsnag) Request(c *client) error { } sessionID := e.MetaData.SpecialInfo.AsayerSessionId token := e.MetaData.SpecialInfo.OpenReplaySessionToken - if sessionID == 0 && token == "" { + if sessionID == 0 && token == "" { // c.errChan <- "No AsayerSessionId found. | Message: %v", e continue } @@ -94,16 +92,16 @@ func (b *bugsnag) Request(c *client) error { c.errChan <- err continue } - timestamp := uint64(utime.ToMilliseconds(parsedTime)) + timestamp := uint64(parsedTime.UnixMilli()) c.setLastMessageTimestamp(timestamp) c.evChan <- &SessionErrorEvent{ SessionID: sessionID, - Token: token, + Token: token, RawErrorEvent: &messages.RawErrorEvent{ - Source: "bugsnag", + Source: "bugsnag", Timestamp: timestamp, - Name: e.Exceptions[0].Message, - Payload: string(jsonEvent), + Name: e.Exceptions[0].Message, + Payload: string(jsonEvent), }, } } diff --git a/backend/services/integrations/integration/client.go b/backend/services/integrations/integration/client.go index 2abf9913d..315bfe4e9 100644 --- a/backend/services/integrations/integration/client.go +++ b/backend/services/integrations/integration/client.go @@ -5,10 +5,10 @@ import ( "fmt" "log" "sync" + "time" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/utime" ) const MAX_ATTEMPTS_IN_A_ROW = 4 @@ -20,10 +20,10 @@ type requester interface { } type requestData struct { - LastMessageTimestamp uint64 // `json:"lastMessageTimestamp, string"` - LastMessageId string + LastMessageTimestamp uint64 // `json:"lastMessageTimestamp, string"` + LastMessageId string UnsuccessfullAttemptsCount int - LastAttemptTimestamp int64 + LastAttemptTimestamp int64 } type client struct { @@ -31,19 +31,19 @@ type client struct { requester integration *postgres.Integration // TODO: timeout ? 
- mux sync.Mutex + mux sync.Mutex updateChan chan<- postgres.Integration - evChan chan<- *SessionErrorEvent - errChan chan<- error + evChan chan<- *SessionErrorEvent + errChan chan<- error } type SessionErrorEvent struct { SessionID uint64 - Token string + Token string *messages.RawErrorEvent } -type ClientMap map[ string ]*client +type ClientMap map[string]*client func NewClient(i *postgres.Integration, updateChan chan<- postgres.Integration, evChan chan<- *SessionErrorEvent, errChan chan<- error) (*client, error) { c := new(client) @@ -60,15 +60,14 @@ func NewClient(i *postgres.Integration, updateChan chan<- postgres.Integration, // TODO: RequestData manager if c.requestData.LastMessageTimestamp == 0 { // ? - c.requestData.LastMessageTimestamp = uint64(utime.CurrentTimestamp() - 24*60*60*1000) + c.requestData.LastMessageTimestamp = uint64(time.Now().Add(-time.Hour * 24).UnixMilli()) } return c, nil } - // from outside -func (c* client) Update(i *postgres.Integration) error { +func (c *client) Update(i *postgres.Integration) error { c.mux.Lock() defer c.mux.Unlock() var r requester @@ -111,8 +110,8 @@ func (c *client) getLastMessageTimestamp() uint64 { } func (c *client) setLastMessageId(timestamp uint64, id string) { //if timestamp >= c.requestData.LastMessageTimestamp { - c.requestData.LastMessageId = id - c.requestData.LastMessageTimestamp = timestamp + c.requestData.LastMessageId = id + c.requestData.LastMessageTimestamp = timestamp //} } func (c *client) getLastMessageId() string { @@ -128,18 +127,18 @@ func (c *client) Request() { c.mux.Lock() defer c.mux.Unlock() if c.requestData.UnsuccessfullAttemptsCount >= MAX_ATTEMPTS || - (c.requestData.UnsuccessfullAttemptsCount >= MAX_ATTEMPTS_IN_A_ROW && - utime.CurrentTimestamp() - c.requestData.LastAttemptTimestamp < ATTEMPTS_INTERVAL) { + (c.requestData.UnsuccessfullAttemptsCount >= MAX_ATTEMPTS_IN_A_ROW && + time.Now().UnixMilli()-c.requestData.LastAttemptTimestamp < ATTEMPTS_INTERVAL) { return } - c.requestData.LastAttemptTimestamp = utime.CurrentTimestamp() + c.requestData.LastAttemptTimestamp = time.Now().UnixMilli() err := c.requester.Request(c) if err != nil { log.Println("ERRROR L139") log.Println(err) c.handleError(err) - c.requestData.UnsuccessfullAttemptsCount++; + c.requestData.UnsuccessfullAttemptsCount++ } else { c.requestData.UnsuccessfullAttemptsCount = 0 } @@ -152,5 +151,3 @@ func (c *client) Request() { c.integration.RequestData = rd c.updateChan <- *c.integration } - - diff --git a/backend/services/integrations/integration/datadog.go b/backend/services/integrations/integration/datadog.go index eb7b5daee..096c3b822 100644 --- a/backend/services/integrations/integration/datadog.go +++ b/backend/services/integrations/integration/datadog.go @@ -1,38 +1,37 @@ package integration import ( - "fmt" - "net/http" - "encoding/json" "bytes" - "time" + "encoding/json" + "fmt" "io" - "io/ioutil" + "io/ioutil" + "net/http" + "time" - "openreplay/backend/pkg/utime" "openreplay/backend/pkg/messages" ) -/* +/* We collect Logs. 
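Worth noting in the client.go hunk above: the first-run lookback becomes time.Now().Add(-time.Hour * 24).UnixMilli(), and the attempt throttle now compares millisecond timestamps directly. The guard, extracted into a hypothetical predicate that would live in the same package (requestData and the MAX_* / ATTEMPTS_INTERVAL constants are the ones from this file):

// shouldSkipRequest mirrors the guard at the top of client.Request after the
// rewrite; it is an illustrative extraction, not a function in the diff.
func shouldSkipRequest(rd requestData) bool {
	return rd.UnsuccessfullAttemptsCount >= MAX_ATTEMPTS ||
		(rd.UnsuccessfullAttemptsCount >= MAX_ATTEMPTS_IN_A_ROW &&
			time.Now().UnixMilli()-rd.LastAttemptTimestamp < ATTEMPTS_INTERVAL)
}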
Datadog also has Events */ type datadog struct { - ApplicationKey string //`json:"application_key"` - ApiKey string //`json:"api_key"` + ApplicationKey string //`json:"application_key"` + ApiKey string //`json:"api_key"` } type datadogResponce struct { - Logs []json.RawMessage + Logs []json.RawMessage NextLogId *string - Status string + Status string } type datadogLog struct { Content struct { - Timestamp string - Message string + Timestamp string + Message string Attributes struct { Error struct { // Not sure about this Message string @@ -48,10 +47,10 @@ func (d *datadog) makeRequest(nextLogId *string, fromTs uint64, toTs uint64) (*h d.ApplicationKey, ) startAt := "null" - if nextLogId != nil && *nextLogId != "" { + if nextLogId != nil && *nextLogId != "" { startAt = *nextLogId } - // Query: status:error/info/warning? + // Query: status:error/info/warning? // openReplaySessionToken instead of asayer_session_id jsonBody := fmt.Sprintf(`{ "limit": 1000, @@ -72,8 +71,8 @@ func (d *datadog) makeRequest(nextLogId *string, fromTs uint64, toTs uint64) (*h } func (d *datadog) Request(c *client) error { - fromTs := c.getLastMessageTimestamp() + 1 // From next millisecond - toTs := uint64(utime.CurrentTimestamp()) + fromTs := c.getLastMessageTimestamp() + 1 // From next millisecond + toTs := uint64(time.Now().UnixMilli()) var nextLogId *string for { req, err := d.makeRequest(nextLogId, fromTs, toTs) @@ -111,16 +110,16 @@ func (d *datadog) Request(c *client) error { c.errChan <- err continue } - timestamp := uint64(utime.ToMilliseconds(parsedTime)) + timestamp := uint64(parsedTime.UnixMilli()) c.setLastMessageTimestamp(timestamp) c.evChan <- &SessionErrorEvent{ //SessionID: sessionID, Token: token, RawErrorEvent: &messages.RawErrorEvent{ - Source: "datadog", + Source: "datadog", Timestamp: timestamp, - Name: ddLog.Content.Attributes.Error.Message, - Payload: string(jsonLog), + Name: ddLog.Content.Attributes.Error.Message, + Payload: string(jsonLog), }, } } @@ -129,4 +128,4 @@ func (d *datadog) Request(c *client) error { return nil } } -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/elasticsearch.go b/backend/services/integrations/integration/elasticsearch.go index 14480e0b8..dd6f5d5f9 100644 --- a/backend/services/integrations/integration/elasticsearch.go +++ b/backend/services/integrations/integration/elasticsearch.go @@ -12,7 +12,6 @@ import ( "time" "openreplay/backend/pkg/messages" - "openreplay/backend/pkg/utime" ) type elasticsearch struct { @@ -164,7 +163,7 @@ func (es *elasticsearch) Request(c *client) error { c.errChan <- err continue } - timestamp := uint64(utime.ToMilliseconds(esLog.Time)) + timestamp := uint64(esLog.Time.UnixMilli()) c.setLastMessageTimestamp(timestamp) var sessionID uint64 diff --git a/backend/services/integrations/integration/newrelic.go b/backend/services/integrations/integration/newrelic.go index 937ab166d..2dce79aa5 100644 --- a/backend/services/integrations/integration/newrelic.go +++ b/backend/services/integrations/integration/newrelic.go @@ -2,25 +2,24 @@ package integration import ( "encoding/json" - "time" + "errors" "fmt" - "net/http" "io" - "io/ioutil" - "errors" + "io/ioutil" + "net/http" + "time" "openreplay/backend/pkg/messages" ) /* - We use insights-api for query. They also have Logs and Events + We use insights-api for query. 
They also have Logs and Events */ - // TODO: Eu/us type newrelic struct { - ApplicationId string //`json:"application_id"` - XQueryKey string //`json:"x_query_key"` + ApplicationId string //`json:"application_id"` + XQueryKey string //`json:"x_query_key"` } // TODO: Recheck @@ -34,14 +33,14 @@ type newrelicResponce struct { type newrelicEvent struct { //AsayerSessionID uint64 `json:"asayer_session_id,string"` // string/int decoder? OpenReplaySessionToken string `json:"openReplaySessionToken"` - ErrorClass string `json:"error.class"` - Timestamp uint64 `json:"timestamp"` + ErrorClass string `json:"error.class"` + Timestamp uint64 `json:"timestamp"` } func (nr *newrelic) Request(c *client) error { sinceTs := c.getLastMessageTimestamp() + 1000 // From next second // In docs - format "yyyy-mm-dd HH:MM:ss", but time.RFC3339 works fine too - sinceFormatted := time.Unix(0, int64(sinceTs*1e6)).Format(time.RFC3339) + sinceFormatted := time.UnixMilli(int64(sinceTs)).Format(time.RFC3339) // US/EU endpoint ?? requestURL := fmt.Sprintf("https://insights-api.eu.newrelic.com/v1/accounts/%v/query", nr.ApplicationId) req, err := http.NewRequest("GET", requestURL, nil) @@ -64,11 +63,10 @@ func (nr *newrelic) Request(c *client) error { } defer resp.Body.Close() - // 401 (unauthorised) if wrong XQueryKey/deploymentServer is wrong or 403 (Forbidden) if ApplicationId is wrong // 400 if Query has problems if resp.StatusCode >= 400 { - io.Copy(ioutil.Discard, resp.Body) // Read the body to free socket + io.Copy(ioutil.Discard, resp.Body) // Read the body to free socket return fmt.Errorf("Newrelic: server respond with the code %v| Request: ", resp.StatusCode, *req) } // Pagination depending on returning metadata ? @@ -92,13 +90,13 @@ func (nr *newrelic) Request(c *client) error { c.evChan <- &SessionErrorEvent{ Token: e.OpenReplaySessionToken, RawErrorEvent: &messages.RawErrorEvent{ - Source: "newrelic", + Source: "newrelic", Timestamp: e.Timestamp, - Name: e.ErrorClass, - Payload: string(jsonEvent), + Name: e.ErrorClass, + Payload: string(jsonEvent), }, } } } return nil -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/sentry.go b/backend/services/integrations/integration/sentry.go index 0330430c3..1c5bfdaad 100644 --- a/backend/services/integrations/integration/sentry.go +++ b/backend/services/integrations/integration/sentry.go @@ -1,44 +1,41 @@ package integration import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" "net/http" "net/url" - "encoding/json" - "strings" - "fmt" - "time" "strconv" - "io" - "io/ioutil" + "strings" + "time" - "openreplay/backend/pkg/utime" "openreplay/backend/pkg/messages" ) - -/* +/* They also have different stuff - Documentation says: + Documentation says: "Note: This endpoint is experimental and may be removed without notice." */ type sentry struct { OrganizationSlug string // `json:"organization_slug"` - ProjectSlug string // `json:"project_slug"` - Token string // `json:"token"` + ProjectSlug string // `json:"project_slug"` + Token string // `json:"token"` } type sentryEvent struct { Tags []struct { - Key string - Value string `json:"value"` + Key string + Value string `json:"value"` } - DateCreated string `json:"dateCreated"` // or dateReceived ? - Title string - EventID string `json:"eventID"` + DateCreated string `json:"dateCreated"` // or dateReceived ? 
+ Title string + EventID string `json:"eventID"` } - func (sn *sentry) Request(c *client) error { requestURL := fmt.Sprintf("https://sentry.io/api/0/projects/%v/%v/events/", sn.OrganizationSlug, sn.ProjectSlug) req, err := http.NewRequest("GET", requestURL, nil) @@ -88,9 +85,9 @@ PageLoop: c.errChan <- fmt.Errorf("%v | Event: %v", err, e) continue } - timestamp := uint64(utime.ToMilliseconds(parsedTime)) + timestamp := uint64(parsedTime.UnixMilli()) // TODO: not to receive all the messages (use default integration timestamp) - if firstEvent { // TODO: reverse range? + if firstEvent { // TODO: reverse range? c.setLastMessageId(timestamp, e.EventID) firstEvent = false } @@ -117,12 +114,12 @@ PageLoop: c.evChan <- &SessionErrorEvent{ SessionID: sessionID, - Token: token, + Token: token, RawErrorEvent: &messages.RawErrorEvent{ - Source: "sentry", + Source: "sentry", Timestamp: timestamp, - Name: e.Title, - Payload: string(jsonEvent), + Name: e.Title, + Payload: string(jsonEvent), }, } } @@ -137,7 +134,7 @@ PageLoop: return fmt.Errorf("Link header format error. Got: '%v'", linkHeader) } - nextLinkInfo := pagInfo[ 1 ] + nextLinkInfo := pagInfo[1] if strings.Contains(nextLinkInfo, `results="false"`) { break } @@ -151,4 +148,4 @@ PageLoop: } } return nil -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/stackdriver.go b/backend/services/integrations/integration/stackdriver.go index bb8e3cef9..e852d5d36 100644 --- a/backend/services/integrations/integration/stackdriver.go +++ b/backend/services/integrations/integration/stackdriver.go @@ -1,22 +1,19 @@ package integration - import ( - "google.golang.org/api/option" "cloud.google.com/go/logging/logadmin" "google.golang.org/api/iterator" - - //"strconv" - "encoding/json" - "time" - "fmt" - "context" + "google.golang.org/api/option" + + //"strconv" + "context" + "encoding/json" + "fmt" + "time" - "openreplay/backend/pkg/utime" "openreplay/backend/pkg/messages" ) - // Old: asayerSessionId const SD_FILTER_QUERY = ` @@ -28,7 +25,7 @@ const SD_FILTER_QUERY = ` type stackdriver struct { ServiceAccountCredentials string // `json:"service_account_credentials"` - LogName string // `json:"log_name"` + LogName string // `json:"log_name"` } type saCreds struct { @@ -37,10 +34,10 @@ type saCreds struct { func (sd *stackdriver) Request(c *client) error { fromTs := c.getLastMessageTimestamp() + 1 // Timestamp is RFC3339Nano, so we take the next millisecond - fromFormatted := time.Unix(0, int64(fromTs *1e6)).Format(time.RFC3339Nano) + fromFormatted := time.UnixMilli(int64(fromTs)).Format(time.RFC3339Nano) ctx := context.Background() - var parsedCreds saCreds + var parsedCreds saCreds err := json.Unmarshal([]byte(sd.ServiceAccountCredentials), &parsedCreds) if err != nil { return err @@ -49,56 +46,56 @@ func (sd *stackdriver) Request(c *client) error { opt := option.WithCredentialsJSON([]byte(sd.ServiceAccountCredentials)) client, err := logadmin.NewClient(ctx, parsedCreds.ProjectId, opt) if err != nil { - return err + return err } defer client.Close() - - filter := fmt.Sprintf(SD_FILTER_QUERY, parsedCreds.ProjectId, sd.LogName, fromFormatted) - // By default, Entries are listed from oldest to newest. 
- /* ResourceNames(rns []string) - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - */ - it := client.Entries(ctx, logadmin.Filter(filter)) - // TODO: Pagination: - //pager := iterator.NewPager(it, 1000, "") - //nextToken, err := pager.NextPage(&entries) - //if nextToken == "" { break } - for { - e, err := it.Next() - if err == iterator.Done { - break - } - if err != nil { - return err - } + filter := fmt.Sprintf(SD_FILTER_QUERY, parsedCreds.ProjectId, sd.LogName, fromFormatted) + // By default, Entries are listed from oldest to newest. + /* ResourceNames(rns []string) + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + */ + it := client.Entries(ctx, logadmin.Filter(filter)) - token := e.Labels["openReplaySessionToken"] - // sessionID, err := strconv.ParseUint(strSessionID, 10, 64) - // if err != nil { - // c.errChan <- err - // continue - // } - jsonEvent, err := json.Marshal(e) - if err != nil { - c.errChan <- err - continue - } - timestamp := uint64(utime.ToMilliseconds(e.Timestamp)) - c.setLastMessageTimestamp(timestamp) - c.evChan <- &SessionErrorEvent{ + // TODO: Pagination: + //pager := iterator.NewPager(it, 1000, "") + //nextToken, err := pager.NextPage(&entries) + //if nextToken == "" { break } + for { + e, err := it.Next() + if err == iterator.Done { + break + } + if err != nil { + return err + } + + token := e.Labels["openReplaySessionToken"] + // sessionID, err := strconv.ParseUint(strSessionID, 10, 64) + // if err != nil { + // c.errChan <- err + // continue + // } + jsonEvent, err := json.Marshal(e) + if err != nil { + c.errChan <- err + continue + } + timestamp := uint64(e.Timestamp.UnixMilli()) + c.setLastMessageTimestamp(timestamp) + c.evChan <- &SessionErrorEvent{ //SessionID: sessionID, Token: token, RawErrorEvent: &messages.RawErrorEvent{ - Source: "stackdriver", + Source: "stackdriver", Timestamp: timestamp, - Name: e.InsertID, // not sure about that - Payload: string(jsonEvent), + Name: e.InsertID, // not sure about that + Payload: string(jsonEvent), }, } } return nil -} \ No newline at end of file +} diff --git a/backend/services/integrations/integration/sumologic.go b/backend/services/integrations/integration/sumologic.go index 2660dd6ac..8ff39ec9e 100644 --- a/backend/services/integrations/integration/sumologic.go +++ b/backend/services/integrations/integration/sumologic.go @@ -1,20 +1,19 @@ package integration import ( - "net/http" - "time" "encoding/json" "fmt" - "strings" "io" - "io/ioutil" + "io/ioutil" + "net/http" + "strings" + "time" - "openreplay/backend/pkg/utime" "openreplay/backend/pkg/messages" ) -/* - The maximum value for limit is 10,000 messages or 100 MB in total message size, +/* + The maximum value for limit is 10,000 messages or 100 MB in total message size, which means the query may return less than 10,000 messages if you exceed the size limit. 
API Documentation: https://help.sumologic.com/APIs/Search-Job-API/About-the-Search-Job-API
@@ -22,31 +21,30 @@ import (
 const SL_LIMIT = 10000
 type sumologic struct {
- AccessId string // `json:"access_id"`
- AccessKey string // `json:"access_key"`
- cookies []*http.Cookie
+ AccessId string // `json:"access_id"`
+ AccessKey string // `json:"access_key"`
+ cookies []*http.Cookie
 }
-
 type sumplogicJobResponce struct {
 Id string
 }
 type sumologicJobStatusResponce struct {
- State string
+ State string
 MessageCount int
 //PendingErrors []string
 }
 type sumologicResponce struct {
- Messages [] struct {
+ Messages []struct {
 Map json.RawMessage
 }
 }
 type sumologicEvent struct {
 Timestamp uint64 `json:"_messagetime,string"`
- Raw string `json:"_raw"`
+ Raw string `json:"_raw"`
 }
 func (sl *sumologic) deleteJob(jobId string, errChan chan<- error) {
@@ -68,10 +66,9 @@ func (sl *sumologic) deleteJob(jobId string, errChan chan<- error) {
 resp.Body.Close()
 }
-
 func (sl *sumologic) Request(c *client) error {
 fromTs := c.getLastMessageTimestamp() + 1 // From next millisecond
- toTs := utime.CurrentTimestamp()
+ toTs := time.Now().UnixMilli()
 requestURL := fmt.Sprintf("https://api.%vsumologic.com/api/v1/search/jobs", "eu.") // deployment server??
 jsonBody := fmt.Sprintf(`{
 "query": "\"openReplaySessionToken=\" AND (*error* OR *fail* OR *exception*)",
@@ -132,7 +129,7 @@ func (sl *sumologic) Request(c *client) error {
 tick := time.Tick(5 * time.Second)
 for {
- <- tick
+ <-tick
 resp, err = http.DefaultClient.Do(req)
 if err != nil {
 return err // TODO: retry, counter/timeout
@@ -147,12 +144,12 @@ func (sl *sumologic) Request(c *client) error {
 }
 if jobStatus.State == "DONE GATHERING RESULTS" {
 offset := 0
- for ;offset < jobStatus.MessageCount; {
+ for offset < jobStatus.MessageCount {
 requestURL = fmt.Sprintf(
- "https://api.%vsumologic.com/api/v1/search/jobs/%v/messages?offset=%v&limit=%v",
- "eu.",
- jobResponce.Id,
- offset,
+ "https://api.%vsumologic.com/api/v1/search/jobs/%v/messages?offset=%v&limit=%v",
+ "eu.",
+ jobResponce.Id,
+ offset,
 SL_LIMIT,
 )
 req, err = http.NewRequest("GET", requestURL, nil)
@@ -190,17 +187,17 @@ func (sl *sumologic) Request(c *client) error {
 }
 name := e.Raw
 if len(name) > 20 {
- name = name[:20] // not sure about that
+ name = name[:20] // not sure about that
 }
 c.setLastMessageTimestamp(e.Timestamp)
 c.evChan <- &SessionErrorEvent{
 //SessionID: sessionID,
 Token: token,
 RawErrorEvent: &messages.RawErrorEvent{
- Source: "sumologic",
+ Source: "sumologic",
 Timestamp: e.Timestamp,
- Name: name,
- Payload: string(m.Map), //e.Raw ?
+ Name: name,
+ Payload: string(m.Map), //e.Raw ?
 },
 }
@@ -209,11 +206,11 @@ func (sl *sumologic) Request(c *client) error {
 }
 break
 }
- if jobStatus.State != "NOT STARTED" &&
+ if jobStatus.State != "NOT STARTED" &&
 jobStatus.State != "GATHERING RESULTS" {
 // error
 break
 }
 }
 return nil
-}
\ No newline at end of file
+}
diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go
index e93a7a0cd..e3dd3f05c 100644
--- a/backend/services/integrations/main.go
+++ b/backend/services/integrations/main.go
@@ -80,13 +80,10 @@ func main() {
 }
 sessionID = sessData.ID
 }
- // TODO: send to ready-events topic. Otherwise it have to go through the events worker.
+ // TODO: send to ready-events topic. Otherwise it has to go through the events worker.
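The hunk just below also drops the listener.Close()/pg.Close()/os.Exit(0) sequence from the manager.Errors branch, so one failing integration no longer kills the worker. A simplified, runnable sketch of the post-patch behavior (channel contents and names are illustrative, not from the diff):

package main

import (
	"fmt"
	"log"
)

func main() {
	errs := make(chan error, 2) // stands in for manager.Errors
	errs <- fmt.Errorf("sentry: 401 unauthorized")
	errs <- fmt.Errorf("sumologic: job polling timed out")
	close(errs)
	for err := range errs {
		// Post-patch: log and keep consuming; pre-patch this branch closed the
		// listener and exited the process.
		log.Printf("Integration error: %v\n", err)
	}
}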
producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent)) case err := <-manager.Errors: log.Printf("Integration error: %v\n", err) - listener.Close() - pg.Close() - os.Exit(0) case i := <-manager.RequestDataUpdates: // log.Printf("Last request integration update: %v || %v\n", i, string(i.RequestData)) if err := pg.UpdateIntegrationRequestData(&i); err != nil { diff --git a/backend/services/sink/main.go b/backend/services/sink/main.go index 5893e93e6..a649bb6ef 100644 --- a/backend/services/sink/main.go +++ b/backend/services/sink/main.go @@ -1,8 +1,8 @@ package main import ( - "log" "encoding/binary" + "log" "time" "os" @@ -10,67 +10,64 @@ import ( "syscall" "openreplay/backend/pkg/env" + . "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" - . "openreplay/backend/pkg/messages" ) - - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - FS_DIR := env.String("FS_DIR"); + FS_DIR := env.String("FS_DIR") if _, err := os.Stat(FS_DIR); os.IsNotExist(err) { log.Fatalf("%v doesn't exist. %v", FS_DIR, err) } writer := NewWriter(env.Uint16("FS_ULIMIT"), FS_DIR) - count := 0 + count := 0 consumer := queue.NewMessageConsumer( env.String("GROUP_SINK"), - []string{ + []string{ env.String("TOPIC_RAW_WEB"), env.String("TOPIC_RAW_IOS"), - }, - func(sessionID uint64, message Message, _ *types.Meta) { - //typeID, err := GetMessageTypeID(value) - // if err != nil { - // log.Printf("Message type decoding error: %v", err) - // return - // } - typeID := message.Meta().TypeID - if !IsReplayerType(typeID) { - return - } + }, + func(sessionID uint64, message Message, _ *types.Meta) { + //typeID, err := GetMessageTypeID(value) + // if err != nil { + // log.Printf("Message type decoding error: %v", err) + // return + // } + typeID := message.Meta().TypeID + if !IsReplayerType(typeID) { + return + } - count++ + count++ - value := message.Encode() - var data []byte - if IsIOSType(typeID) { - data = value - } else { + value := message.Encode() + var data []byte + if IsIOSType(typeID) { + data = value + } else { data = make([]byte, len(value)+8) copy(data[8:], value[:]) binary.LittleEndian.PutUint64(data[0:], message.Meta().Index) - } - if err := writer.Write(sessionID, data); err != nil { + } + if err := writer.Write(sessionID, data); err != nil { log.Printf("Writer error: %v\n", err) } - }, + }, + false, ) - consumer.DisableAutoCommit() - sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) + signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) - tick := time.Tick(30 * time.Second) + tick := time.Tick(30 * time.Second) - log.Printf("Sink service started\n") + log.Printf("Sink service started\n") for { select { case sig := <-sigchan: @@ -85,7 +82,7 @@ func main() { log.Printf("%v messages during 30 sec", count) count = 0 - + consumer.Commit() default: err := consumer.ConsumeNext() @@ -96,4 +93,3 @@ func main() { } } - diff --git a/backend/services/storage/clean.go b/backend/services/storage/clean.go index 829bc8705..72f5f359c 100644 --- a/backend/services/storage/clean.go +++ b/backend/services/storage/clean.go @@ -1,23 +1,23 @@ package main import ( - "os" - "log" - "time" - "strconv" "io/ioutil" + "log" + "os" + "strconv" + "time" "openreplay/backend/pkg/flakeid" ) -const DELETE_TIMEOUT = 12 * time.Hour; +const DELETE_TIMEOUT = 48 * time.Hour func cleanDir(dirname string) { - files, err := ioutil.ReadDir(dirname) - if err != nil { - log.Printf("Cannot read file directory. 
%v", err) - return - } + files, err := ioutil.ReadDir(dirname) + if err != nil { + log.Printf("Cannot read file directory. %v", err) + return + } for _, f := range files { name := f.Name() @@ -27,8 +27,9 @@ func cleanDir(dirname string) { continue } ts := int64(flakeid.ExtractTimestamp(id)) - if time.Unix(ts/1000, 0).Add(DELETE_TIMEOUT).Before(time.Now()) { + if time.UnixMilli(ts).Add(DELETE_TIMEOUT).Before(time.Now()) { + // returns a error. Don't log it sinse it can be race condition between worker instances os.Remove(dirname + "/" + name) } } -} \ No newline at end of file +} diff --git a/backend/services/storage/main.go b/backend/services/storage/main.go index 5033fb845..9579fbe4f 100644 --- a/backend/services/storage/main.go +++ b/backend/services/storage/main.go @@ -2,45 +2,41 @@ package main import ( "log" - "time" "os" "strconv" + "time" "os/signal" "syscall" "openreplay/backend/pkg/env" - "openreplay/backend/pkg/storage" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/queue/types" + "openreplay/backend/pkg/storage" ) - - func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - - storageWeb := storage.NewS3(env.String("AWS_REGION_WEB"), env.String("S3_BUCKET_WEB")) - //storageIos := storage.NewS3(env.String("AWS_REGION_IOS"), env.String("S3_BUCKET_IOS")) + storage := storage.NewS3(env.String("AWS_REGION_WEB"), env.String("S3_BUCKET_WEB")) FS_DIR := env.String("FS_DIR") FS_CLEAN_HRS := env.Int("FS_CLEAN_HRS") - var uploadKey func(string, int, *storage.S3) - uploadKey = func(key string, retryCount int, s *storage.S3) { + var uploadKey func(string, int) + uploadKey = func(key string, retryCount int) { if retryCount <= 0 { - return; + return } file, err := os.Open(FS_DIR + "/" + key) defer file.Close() if err != nil { log.Printf("File error: %v; Will retry %v more time(s)\n", err, retryCount) time.AfterFunc(2*time.Minute, func() { - uploadKey(key, retryCount - 1, s) + uploadKey(key, retryCount-1) }) } else { - if err := s.Upload(gzipFile(file), key, "application/octet-stream", true); err != nil { + if err := storage.Upload(gzipFile(file), key, "application/octet-stream", true); err != nil { log.Fatalf("Storage upload error: %v\n", err) } } @@ -48,27 +44,24 @@ func main() { consumer := queue.NewMessageConsumer( env.String("GROUP_STORAGE"), - []string{ + []string{ env.String("TOPIC_TRIGGER"), - }, - func(sessionID uint64, msg messages.Message, meta *types.Meta) { - switch msg.(type) { - case *messages.SessionEnd: - uploadKey(strconv.FormatUint(sessionID, 10), 5, storageWeb) - //case *messages.IOSSessionEnd: - // uploadKey(strconv.FormatUint(sessionID, 10), 5, storageIos) - } - }, + }, + func(sessionID uint64, msg messages.Message, meta *types.Meta) { + switch msg.(type) { + case *messages.SessionEnd: + uploadKey(strconv.FormatUint(sessionID, 10), 5) + } + }, + true, ) sigchan := make(chan os.Signal, 1) - signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) + signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM) + cleanTick := time.Tick(time.Duration(FS_CLEAN_HRS) * time.Hour) - cleanTick := time.Tick(time.Duration(FS_CLEAN_HRS) * time.Hour) - - - log.Printf("Storage service started\n") + log.Printf("Storage service started\n") for { select { case sig := <-sigchan: @@ -76,7 +69,7 @@ func main() { consumer.Close() os.Exit(0) case <-cleanTick: - cleanDir(FS_DIR) + go cleanDir(FS_DIR) default: err := consumer.ConsumeNext() if err != nil { @@ -85,4 +78,3 @@ func main() { } } } - diff --git a/ee/api/.env.default b/ee/api/.env.default 
index 28f46f273..094579f1b 100644 --- a/ee/api/.env.default +++ b/ee/api/.env.default @@ -37,8 +37,8 @@ jwt_algorithm=HS512 jwt_exp_delta_seconds=2592000 jwt_issuer=openreplay-default-ee jwt_secret="SET A RANDOM STRING HERE" -peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list -peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live +assist=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live +assistList=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list pg_dbname=postgres pg_host=postgresql.db.svc.cluster.local pg_password=asayerPostgres @@ -46,11 +46,13 @@ pg_port=5432 pg_user=postgres pg_timeout=30 pg_minconn=45 +PG_RETRY_MAX=50 +PG_RETRY_INTERVAL=2 put_S3_TTL=20 sentryURL= sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps -sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps +sourcemaps_reader=http://127.0.0.1:9000/ stage=default-ee version_number=1.0.0 diff --git a/ee/api/.gitignore b/ee/api/.gitignore index f1ff9550b..c5a8d9ce4 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -180,9 +180,6 @@ Pipfile /chalicelib/core/alerts.py /chalicelib/core/alerts_processor.py /chalicelib/core/announcements.py -/chalicelib/blueprints/bp_app_api.py -/chalicelib/blueprints/bp_core.py -/chalicelib/blueprints/bp_core_crons.py /chalicelib/core/collaboration_slack.py /chalicelib/core/errors_favorite_viewed.py /chalicelib/core/events.py @@ -237,7 +234,6 @@ Pipfile /chalicelib/utils/smtp.py /chalicelib/utils/strings.py /chalicelib/utils/TimeUTC.py -/chalicelib/blueprints/app/__init__.py /routers/app/__init__.py /routers/crons/__init__.py /routers/subs/__init__.py @@ -245,8 +241,8 @@ Pipfile /chalicelib/core/assist.py /auth/auth_apikey.py /auth/auth_jwt.py -/chalicelib/blueprints/subs/bp_insights.py /build.sh +/routers/base.py /routers/core.py /routers/crons/core_crons.py /routers/subs/dashboard.py @@ -257,10 +253,12 @@ Pipfile /chalicelib/core/heatmaps.py /routers/subs/insights.py /schemas.py -/chalicelib/blueprints/app/v1_api.py -/routers/app/v1_api.py /chalicelib/core/custom_metrics.py /chalicelib/core/performance_event.py /chalicelib/core/saved_search.py /app_alerts.py /build_alerts.sh +/routers/subs/metrics.py +/routers/subs/v1_api.py +/chalicelib/core/dashboards.py +entrypoint.sh \ No newline at end of file diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index cca6e6806..aee6aecb2 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -6,6 +6,15 @@ WORKDIR /work COPY . . RUN pip install -r requirements.txt RUN mv .env.default .env +ENV APP_NAME chalice +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* && \ + cd sourcemap-reader && \ + npm install # Add Tini # Startup daemon diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts index 9be6ebc93..6aec0f98b 100644 --- a/ee/api/Dockerfile.alerts +++ b/ee/api/Dockerfile.alerts @@ -5,8 +5,9 @@ RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf WORKDIR /work COPY . . 
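# The RUN/ENV pair below renames the alerts-specific entrypoint to the generic
# entrypoint.sh and sets APP_NAME=alerts; presumably the shared entrypoint
# dispatches on APP_NAME, so the API and alerts images can share one startup
# path while launching different apps.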
RUN pip install -r requirements.txt -RUN mv .env.default .env && mv app_alerts.py app.py +RUN mv .env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh ENV pg_minconn 2 +ENV APP_NAME alerts # Add Tini # Startup daemon diff --git a/ee/api/_clickhouse_upgrade.sh b/ee/api/_clickhouse_upgrade.sh deleted file mode 100644 index 9b656a584..000000000 --- a/ee/api/_clickhouse_upgrade.sh +++ /dev/null @@ -1,10 +0,0 @@ -sudo yum update -sudo yum install yum-utils -sudo rpm --import https://repo.clickhouse.com/CLICKHOUSE-KEY.GPG -sudo yum-config-manager --add-repo https://repo.clickhouse.com/rpm/stable/x86_64 -sudo yum update -sudo service clickhouse-server restart - - -#later mus use in clickhouse-client: -#SET allow_experimental_window_functions = 1; \ No newline at end of file diff --git a/ee/api/app.py b/ee/api/app.py index fdf7f60b8..0041ec12e 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -11,10 +11,10 @@ from starlette.responses import StreamingResponse, JSONResponse from chalicelib.utils import helper from chalicelib.utils import pg_client from routers import core, core_dynamic, ee, saml -from routers.app import v1_api, v1_api_ee +from routers.subs import v1_api from routers.crons import core_crons from routers.crons import core_dynamic_crons -from routers.subs import dashboard +from routers.subs import dashboard, insights, metrics, v1_api_ee app = FastAPI() @@ -65,7 +65,8 @@ app.include_router(saml.public_app) app.include_router(saml.app) app.include_router(saml.app_apikey) app.include_router(dashboard.app) -# app.include_router(insights.app) +app.include_router(metrics.app) +app.include_router(insights.app) app.include_router(v1_api.app_apikey) app.include_router(v1_api_ee.app_apikey) diff --git a/ee/api/chalicelib/core/boarding.py b/ee/api/chalicelib/core/boarding.py index 6690e59f2..8a2076b58 100644 --- a/ee/api/chalicelib/core/boarding.py +++ b/ee/api/chalicelib/core/boarding.py @@ -6,41 +6,40 @@ from chalicelib.core import projects def get_state(tenant_id): - my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - pids = [s["projectId"] for s in my_projects] + pids = projects.get_projects_ids(tenant_id=tenant_id) with pg_client.PostgresClient() as cur: recorded = False meta = False if len(pids) > 0: cur.execute( - cur.mogrify("""\ - SELECT - COUNT(*) - FROM public.sessions AS s - where s.project_id IN %(ids)s - LIMIT 1;""", + cur.mogrify("""SELECT EXISTS(( SELECT 1 + FROM public.sessions AS s + WHERE s.project_id IN %(ids)s)) AS exists;""", {"ids": tuple(pids)}) ) - recorded = cur.fetchone()["count"] > 0 + recorded = cur.fetchone()["exists"] meta = False if recorded: cur.execute( - cur.mogrify("""SELECT SUM((SELECT COUNT(t.meta) - FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5), - (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10), - (sessions.user_id)) AS t(meta) - WHERE t.meta NOTNULL)) - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 'defined' - FROM public.sessions - WHERE sessions.project_id=p.project_id AND sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON(TRUE) - WHERE p.tenant_id = %(tenant_id)s - AND p.deleted_at ISNULL;""" + cur.mogrify("""SELECT EXISTS((SELECT 1 + FROM public.projects AS p + LEFT JOIN LATERAL ( SELECT 1 + FROM public.sessions + WHERE sessions.project_id = p.project_id + AND sessions.user_id IS NOT NULL + LIMIT 1) AS sessions(user_id) ON (TRUE) + WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL + AND 
( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL + OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL + OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL + OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL + OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL + OR p.metadata_10 IS NOT NULL ) + )) AS exists;""" , {"tenant_id": tenant_id})) - meta = cur.fetchone()["sum"] > 0 + meta = cur.fetchone()["exists"] return [ {"task": "Install OpenReplay", @@ -61,22 +60,18 @@ def get_state(tenant_id): def get_state_installing(tenant_id): - my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - pids = [s["projectId"] for s in my_projects] + pids = projects.get_projects_ids(tenant_id=tenant_id) with pg_client.PostgresClient() as cur: recorded = False if len(pids) > 0: cur.execute( - cur.mogrify("""\ - SELECT - COUNT(*) - FROM public.sessions AS s - where s.project_id IN %(ids)s - LIMIT 1;""", + cur.mogrify("""SELECT EXISTS(( SELECT 1 + FROM public.sessions AS s + WHERE s.project_id IN %(ids)s)) AS exists;""", {"ids": tuple(pids)}) ) - recorded = cur.fetchone()["count"] > 0 + recorded = cur.fetchone()["exists"] return {"task": "Install OpenReplay", "done": recorded, @@ -86,21 +81,24 @@ def get_state_installing(tenant_id): def get_state_identify_users(tenant_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("""SELECT SUM((SELECT COUNT(t.meta) - FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5), - (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10), - (sessions.user_id)) AS t(meta) - WHERE t.meta NOTNULL)) - FROM public.projects AS p - LEFT JOIN LATERAL ( SELECT 'defined' - FROM public.sessions - WHERE sessions.project_id=p.project_id AND sessions.user_id IS NOT NULL - LIMIT 1) AS sessions(user_id) ON(TRUE) - WHERE p.tenant_id = %(tenant_id)s - AND p.deleted_at ISNULL;""" + cur.mogrify("""SELECT EXISTS((SELECT 1 + FROM public.projects AS p + LEFT JOIN LATERAL ( SELECT 1 + FROM public.sessions + WHERE sessions.project_id = p.project_id + AND sessions.user_id IS NOT NULL + LIMIT 1) AS sessions(user_id) ON (TRUE) + WHERE p.tenant_id = %(tenant_id)s AND p.deleted_at ISNULL + AND ( sessions.user_id IS NOT NULL OR p.metadata_1 IS NOT NULL + OR p.metadata_2 IS NOT NULL OR p.metadata_3 IS NOT NULL + OR p.metadata_4 IS NOT NULL OR p.metadata_5 IS NOT NULL + OR p.metadata_6 IS NOT NULL OR p.metadata_7 IS NOT NULL + OR p.metadata_8 IS NOT NULL OR p.metadata_9 IS NOT NULL + OR p.metadata_10 IS NOT NULL ) + )) AS exists;""" , {"tenant_id": tenant_id})) - meta = cur.fetchone()["sum"] > 0 + meta = cur.fetchone()["exists"] return {"task": "Identify Users", "done": meta, diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index 8531d89a3..c7e066f8b 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -1,7 +1,7 @@ import json import schemas -from chalicelib.core import dashboard +from chalicelib.core import metrics from chalicelib.core import sourcemaps, sessions from chalicelib.utils import ch_client, metrics_helper from chalicelib.utils import pg_client, helper @@ -82,7 +82,7 @@ def __rearrange_chart_details(start_at, end_at, density, chart): chart = list(chart) for i in range(len(chart)): chart[i] = {"timestamp": chart[i][0], "count": chart[i][1]} - chart = dashboard.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density, + chart = 
metrics.__complete_missing_steps(rows=chart, start_time=start_at, end_time=end_at, density=density, neutral={"count": 0}) return chart @@ -788,7 +788,7 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo r["chart"] = list(r["chart"]) for i in range(len(r["chart"])): r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]} - r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, + r["chart"] = metrics.__complete_missing_steps(rows=r["chart"], start_time=data.startDate, end_time=data.endDate, density=data.density, neutral={"count": 0}) offset = len(rows) diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py index 387029fd4..3dba723e4 100644 --- a/ee/api/chalicelib/core/insights.py +++ b/ee/api/chalicelib/core/insights.py @@ -1,9 +1,9 @@ -from chalicelib.core import sessions_metas -from chalicelib.utils import helper, dev +import schemas +from chalicelib.core.metrics import __get_basic_constraints, __get_meta_constraint +from chalicelib.core.metrics import __get_constraint_values, __complete_missing_steps from chalicelib.utils import ch_client +from chalicelib.utils import helper, dev from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps -from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint def __transform_journey(rows): @@ -29,7 +29,7 @@ JOURNEY_TYPES = { } -@dev.timed + def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): event_start = None event_table = JOURNEY_TYPES["CLICK"]["table"] @@ -42,7 +42,7 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp= elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append(f"sessions_metadata.project_id = %(project_id)s") meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") @@ -190,7 +190,7 @@ def __complete_acquisition(rows, start_date, end_date=None): return rows -@dev.timed + def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -233,7 +233,7 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT } -@dev.timed + def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -286,7 +286,7 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en } -@dev.timed + def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -303,7 +303,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in 
[schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.user_id IS NOT NULL") meta_condition.append("not empty(sessions_metadata.user_id)") @@ -386,7 +386,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en } -@dev.timed + def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -404,7 +404,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.user_id IS NOT NULL") meta_condition.append("not empty(sessions_metadata.user_id)") @@ -497,7 +497,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), } -@dev.timed + def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): startTimestamp = TimeUTC.trunc_week(startTimestamp) @@ -512,7 +512,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.user_id IS NOT NULL") meta_condition.append("not empty(sessions_metadata.user_id)") @@ -572,7 +572,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da return popularity -@dev.timed + def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): event_type = "CLICK" @@ -586,7 +586,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.user_id IS NOT NULL") meta_condition.append("not empty(sessions_metadata.user_id)") @@ -658,7 +658,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): event_type = "CLICK" @@ -672,7 +672,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, 
schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("user_id IS NOT NULL") meta_condition.append("not empty(sessions_metadata.user_id)") @@ -728,7 +728,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): event_type = "CLICK" @@ -742,7 +742,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.project_id = %(project_id)s") meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") @@ -796,7 +796,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} -@dev.timed + def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): event_table = JOURNEY_TYPES["CLICK"]["table"] @@ -807,7 +807,7 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): event_table = JOURNEY_TYPES[f["value"]]["table"] event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.project_id = %(project_id)s") meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") @@ -838,7 +838,7 @@ PERIOD_TO_FUNCTION = { } -@dev.timed + def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): meta_condition = __get_meta_constraint(args) @@ -847,7 +847,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime for f in filters: if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: period = f["value"] - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") extra_values["user_id"] = f["value"] period_function = PERIOD_TO_FUNCTION[period] @@ -885,7 +885,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime return {"avg": avg, "chart": rows} -@dev.timed + def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) meta_condition = __get_meta_constraint(args) @@ -925,7 +925,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes return {"avg": avg, "partition": helper.list_to_camel_case(rows)} -@dev.timed + def 
users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): ch_sub_query = __get_basic_constraints(table_name="feature", data=args) @@ -940,7 +940,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi elif f["type"] == "EVENT_VALUE": event_value = f["value"] default = False - elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") meta_condition.append("sessions_metadata.project_id = %(project_id)s") meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") @@ -1008,7 +1008,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi } -@dev.timed + def search(text, feature_type, project_id, platform=None): if not feature_type: resource_type = "ALL" @@ -1044,4 +1044,4 @@ def search(text, feature_type, project_id, platform=None): rows = ch.execute(ch_query, params) else: return [] - return [helper.dict_to_camel_case(row) for row in rows] \ No newline at end of file + return [helper.dict_to_camel_case(row) for row in rows] diff --git a/ee/api/chalicelib/core/dashboard.py b/ee/api/chalicelib/core/metrics.py similarity index 65% rename from ee/api/chalicelib/core/dashboard.py rename to ee/api/chalicelib/core/metrics.py index c5c373c78..65889e28d 100644 --- a/ee/api/chalicelib/core/dashboard.py +++ b/ee/api/chalicelib/core/metrics.py @@ -1,6 +1,6 @@ import math -import random +import schemas from chalicelib.utils import pg_client from chalicelib.utils import args_transformer from chalicelib.utils import helper @@ -169,7 +169,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) ch_query = f"""\ SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(sessions.session_id) AS count + COUNT(sessions.session_id) AS value FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -181,19 +181,17 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) rows = ch.execute(query=ch_query, params=params) results = { - "count": sum([r["count"] for r in rows]), + "value": sum([r["value"] for r in rows]), "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"count": 0}) + neutral={"value": 0}) } diff = endTimestamp - startTimestamp endTimestamp = startTimestamp startTimestamp = endTimestamp - diff - ch_query = f"""\ - SELECT - COUNT(sessions.session_id) AS count + ch_query = f""" SELECT COUNT(sessions.session_id) AS count FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, @@ -203,8 +201,8 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) count = count[0]["count"] - results["countProgress"] = helper.__progress(old_val=count, new_val=results["count"]) - + results["progress"] = helper.__progress(old_val=count, new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.count return results @@ -222,9 +220,8 @@ def get_errors(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), endTimesta with ch_client.ClickHouseClient() as ch: ch_query = f"""\ - SELECT - toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(DISTINCT errors.session_id) AS count + SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COUNT(DISTINCT errors.session_id) AS count FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -251,15 +248,15 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta return results -def __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, ch_sub_query, meta=False, **args): +def __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp, ch_sub_query, meta=False, **args): ch_query = f"""\ SELECT COUNT(DISTINCT errors.message) AS count FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if meta else ""} WHERE {" AND ".join(ch_sub_query)};""" - count = cur.execute(query=ch_query, - params={"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)}) + count = ch.execute(query=ch_query, + params={"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)}) if count is not None and len(count) > 0: return count[0]["count"] @@ -304,9 +301,8 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), errors = {} for error_id in error_ids: ch_query = f"""\ - SELECT - toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(errors.session_id) AS count + SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COUNT(errors.session_id) AS count FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -346,14 +342,12 @@ def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="pages", data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition + ch_sub_query.append("(pages.dom_content_loaded_event_end>0 OR pages.first_contentful_paint>0)") # changed dom_content_loaded_event_start to dom_content_loaded_event_end - ch_query = f"""\ - SELECT - COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start, --- COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_start ,0)),0) AS avg_dom_content_load_start, - COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel - FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)};""" + ch_query = f"""SELECT COALESCE(avgOrNull(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start, + COALESCE(avgOrNull(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, 
params=params) @@ -381,10 +375,9 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - ch_query = f"""\ - SELECT AVG(NULLIF(pages.load_event_end ,0)) AS avg_page_load_time - FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)};""" + ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS avg_page_load_time + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} AND pages.load_event_end>0;""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} row = ch.execute(query=ch_query, params=params)[0] @@ -394,11 +387,9 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query.append("resources.type= %(type)s") - ch_query = f"""\ - SELECT - AVG(NULLIF(resources.duration,0)) AS avg - FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)};""" + ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0;""" row = ch.execute(query=ch_query, params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)})[0] @@ -437,21 +428,19 @@ def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1), return results -def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args): +def __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args): ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition - - ch_query = f"""\ - SELECT - COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages, - COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration - FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)};""" + ch_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)") + ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages, + COALESCE(avgOrNull(NULLIF(sessions.duration,0)),0) AS avg_session_duration + FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - rows = cur.execute(query=ch_query, params=params) + rows = ch.execute(query=ch_query, params=params) return rows @@ -464,47 +453,53 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query.append("resources.type = 'img'") ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) ch_sub_query_chart.append("resources.type = 'img'") - ch_sub_query_chart.append("resources.url = %(url)s") + ch_sub_query_chart.append("resources.url IN %(url)s") meta_condition = 
__get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT resources.url, - AVG(NULLIF(resources.duration,0)) AS avg, - COUNT(resources.session_id) AS count + COALESCE(avgOrNull(resources.duration),0) AS avg, + COUNT(resources.session_id) AS count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} + WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0 GROUP BY resources.url ORDER BY avg DESC LIMIT 10;""" - - rows = ch.execute(query=ch_query, - params={"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)}) + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) rows = [{"url": i["url"], "avgDuration": i["avg"], "sessions": i["count"]} for i in rows] - + if len(rows) == 0: + return [] urls = [row["url"] for row in rows] charts = {} + ch_query = f"""SELECT url, + toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS avg + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0 + GROUP BY url, timestamp + ORDER BY url, timestamp;""" + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, "url": urls, **__get_constraint_values(args)} + u_rows = ch.execute(query=ch_query, params=params) for url in urls: - ch_query = f"""\ - SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(NULLIF(resources.duration,0)) AS avg - FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query_chart)} - GROUP BY timestamp - ORDER BY timestamp;""" - params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, "url": url, **__get_constraint_values(args)} - r = ch.execute(query=ch_query, params=params) + sub_rows = [] + for r in u_rows: + if r["url"] == url: + sub_rows.append(r) + elif len(sub_rows) > 0: + break charts[url] = [{"timestamp": int(i["timestamp"]), "avgDuration": i["avg"]} - for i in __complete_missing_steps(rows=r, start_time=startTimestamp, + for i in __complete_missing_steps(rows=sub_rows, start_time=startTimestamp, end_time=endTimestamp, density=density, neutral={"avg": 0})] for i in range(len(rows)): rows[i] = helper.dict_to_camel_case(rows[i]) - rows[i]["chart"] = [helper.dict_to_camel_case(chart) for chart in charts[rows[i]["url"]]] + rows[i]["chart"] = helper.list_to_camel_case(charts[rows[i]["url"]]) return sorted(rows, key=lambda k: k["sessions"], reverse=True) @@ -544,12 +539,11 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(NULLIF(resources.duration,0)) AS avg + ch_query = f"""SELECT 
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} - AND resources.type = 'img' + AND resources.type = 'img' AND resources.duration>0 {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""} GROUP BY timestamp ORDER BY timestamp;""" @@ -558,12 +552,11 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, neutral={"avg": 0})] - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(NULLIF(resources.duration,0)) AS avg + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} - AND resources.type = 'fetch' + AND resources.type = 'fetch' AND resources.duration>0 {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""} GROUP BY timestamp ORDER BY timestamp;""" @@ -577,11 +570,10 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi data=args) ch_sub_query_chart += meta_condition - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(NULLIF(pages.load_event_end ,0)) AS avg + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(pages.load_event_end),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query_chart)} + WHERE {" AND ".join(ch_sub_query_chart)} AND pages.load_event_end>0 {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""} GROUP BY timestamp ORDER BY timestamp;""" @@ -648,9 +640,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, if resource_type == "ALL" and not pages_only and not events_only: ch_sub_query.append("positionUTF8(url_hostpath,%(value)s)!=0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value, - type AS key + ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value, + type AS key FROM resources WHERE {" AND ".join(ch_sub_query)} GROUP BY type @@ -685,9 +676,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, ch_sub_query.append(f"resources.type = '{__get_resource_db_type_from_type(resource_type)}'") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - DISTINCT url_hostpath AS value, - %(resource_type)s AS key + ch_query = f"""SELECT DISTINCT url_hostpath AS value, + %(resource_type)s AS key FROM resources WHERE {" AND ".join(ch_sub_query)} LIMIT 10;""" @@ -787,34 +777,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, return [helper.dict_to_camel_case(row) for row in rows] -# def frustration_sessions(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), -# endTimestamp=TimeUTC.now(), **args): -# with pg_client.PostgresClient() as cur: -# sub_q = "" -# if platform == 'mobile': -# sub_q = "AND s.user_device_type = 'mobile' AND s.project_id = %(project_id)s AND s.start_ts >= %(startTimestamp)s AND s.start_ts < %(endTimestamp)s" -# elif platform == 'desktop': -# sub_q = "AND s.user_device_type = 'desktop' AND s.project_id = %(project_id)s AND s.start_ts >= %(startTimestamp)s AND s.start_ts < %(endTimestamp)s" -# -# cur.execute(cur.mogrify(f"""\ -# SELECT s.project_id, -# s.session_id::text AS session_id, -# s.* -# FROM public.sessions AS s -# LEFT JOIN public.session_watchdogs AS sw ON s.session_id=sw.session_id -# LEFT JOIN public.watchdogs AS w ON w.watchdog_id=sw.watchdog_id -# WHERE s.project_id = %(project_id)s -# AND w.type='clickrage' -# AND s.start_ts>=%(startTimestamp)s -# AND s.start_ts<=%(endTimestamp)s -# {sub_q} -# ORDER BY s.session_id DESC -# LIMIT 5;""", -# {"project_id": project_id, "startTimestamp": startTimestamp, -# "endTimestamp": endTimestamp})) -# return helper.list_to_camel_case(cur.fetchall()) - - def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), density=7, **args): @@ -826,9 +788,8 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - resources.url_hostpath AS key, - COUNT(resources.session_id) AS doc_count + ch_query = f"""SELECT resources.url_hostpath AS key, + COUNT(resources.session_id) AS doc_count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY url_hostpath @@ -841,10 +802,9 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day if len(rows) == 0: return [] ch_sub_query.append("resources.url_hostpath = %(value)s") - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - COUNT(resources.session_id) AS doc_count, - toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COUNT(resources.session_id) AS doc_count, + toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY timestamp @@ -879,9 +839,8 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - resources.url_hostpath, COUNT(resources.session_id) AS doc_count + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + resources.url_hostpath, COUNT(resources.session_id) AS doc_count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp, resources.url_hostpath @@ -933,11 +892,11 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days ch_sub_query_chart.append(f"resources.url = %(value)s") meta_condition = 
__get_meta_constraint(args) ch_sub_query_chart += meta_condition + ch_sub_query_chart.append("resources.duration>0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(NULLIF(resources.duration,0)) AS avg + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -947,7 +906,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days "endTimestamp": endTimestamp, "value": url, "type": type, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(NULLIF(resources.duration,0)) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 @@ -969,9 +928,8 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(pages.dom_building_time) AS avg + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(pages.dom_building_time),0) AS value FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -981,14 +939,15 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=- "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(pages.dom_building_time) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_building_time),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, + return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, - density=density, neutral={"avg": 0})} + density=density, neutral={"value": 0}), + "unit": schemas.TemplatePredefinedUnits.millisecond} def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1009,54 +968,49 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query_chart.append("isNotNull(resources.duration)") ch_sub_query_chart.append("resources.duration>0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - splitByChar('/', resources.url_hostpath)[-1] AS name, - AVG(NULLIF(resources.duration,0)) AS avg + ch_query = f"""SELECT any(url) AS url, any(type) AS type, + splitByChar('/', resources.url_hostpath)[-1] AS name, + COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if 
len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} + WHERE {" AND ".join(ch_sub_query)} GROUP BY name ORDER BY avg DESC LIMIT 10;""" - rows = ch.execute(query=ch_query, - params={"project_id": project_id, - "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)}) - ch_sub_query_chart.append("endsWith(resources.url_hostpath, %(url)s)>0") + params = {"project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + if len(rows) == 0: + return [] ch_sub_query.append(ch_sub_query_chart[-1]) results = [] + names = {f"name_{i}": r["name"] for i, r in enumerate(rows)} + ch_query = f"""SELECT splitByChar('/', resources.url_hostpath)[-1] AS name, + toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS avg + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + AND ({" OR ".join([f"endsWith(resources.url_hostpath, %(name_{i})s)>0" for i in range(len(names.keys()))])}) + GROUP BY name,timestamp + ORDER BY name,timestamp;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + **names, **__get_constraint_values(args)} + charts = ch.execute(query=ch_query, params=params) for r in rows: - # if isinstance(r["url"], bytes): - # try: - # r["url"] = r["url"].decode("utf-8") - # except UnicodeDecodeError: - # continue - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, - AVG(resources.duration) AS avg - FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query_chart)} - GROUP BY timestamp - ORDER BY timestamp;""" - chart = ch.execute(query=ch_query, - params={"step_size": step_size, "project_id": project_id, - "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, - "url": r["name"], **__get_constraint_values(args)}) - r["chart"] = __complete_missing_steps(rows=chart, start_time=startTimestamp, + sub_chart = [] + for c in charts: + if c["name"] == r["name"]: + cc = dict(c) + cc.pop("name") + sub_chart.append(cc) + elif len(sub_chart) > 0: + break + r["chart"] = __complete_missing_steps(rows=sub_chart, start_time=startTimestamp, end_time=endTimestamp, density=density, neutral={"avg": 0}) - ch_query = f"""SELECT url, type - FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - ORDER BY duration DESC - LIMIT 1;""" - url = ch.execute(query=ch_query, - params={"project_id": project_id, - "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, - "url": r["name"], **__get_constraint_values(args)}) - r["url"] = url[0]["url"] - r["type"] = __get_resource_type_from_db_type(url[0]["type"]) + r["type"] = __get_resource_type_from_db_type(r["type"]) results.append(r) return results @@ -1090,7 +1044,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT pages.user_country, AVG(pages.speed_index) AS avg + ch_query = f"""SELECT pages.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS avg FROM 
pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY pages.user_country @@ -1099,7 +1053,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(pages.speed_index) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(pages.speed_index),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 @@ -1119,7 +1073,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 ch_sub_query_chart.append(f"url_path = %(value)s") with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(pages.response_time) AS avg + COALESCE(avgOrNull(pages.response_time),0) AS value FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1130,14 +1084,15 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1 "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(pages.response_time) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, + return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, - density=density, neutral={"avg": 0})} + density=density, neutral={"value": 0}), + "unit": schemas.TemplatePredefinedUnits.millisecond} def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1159,7 +1114,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( params={"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)}) - ch_query = f"""SELECT AVG(pages.response_time) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, @@ -1268,9 +1223,8 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1 ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - intDiv(toHour(sessions.datetime),2)*2 AS hour, - COUNT(sessions.session_id) AS count + ch_query = f"""SELECT intDiv(toHour(sessions.datetime),2)*2 AS hour, + COUNT(sessions.session_id) AS count FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY hour @@ -1293,12 +1247,12 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1), if value is not None: ch_sub_query.append("pages.url_path = %(value)s") with ch_client.ClickHouseClient() 
as ch: - ch_query = f"""SELECT (SELECT COALESCE(AVG(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0) AS avg_response_time, + ch_query = f"""SELECT (SELECT COALESCE(avgOrNull(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0) AS avg_response_time, (SELECT COUNT(pages.session_id) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)}) AS count_requests, - (SELECT COALESCE(AVG(pages.first_paint),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0) AS avg_first_paint, - (SELECT COALESCE(AVG(pages.dom_content_loaded_event_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0) AS avg_dom_content_loaded, - (SELECT COALESCE(AVG(pages.ttfb),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0) AS avg_till_first_bit, - (SELECT COALESCE(AVG(pages.time_to_interactive),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0) AS avg_time_to_interactive;""" + (SELECT COALESCE(avgOrNull(pages.first_paint),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0) AS avg_first_paint, + (SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0) AS avg_dom_content_loaded, + (SELECT COALESCE(avgOrNull(pages.ttfb),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0) AS avg_till_first_bit, + (SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0) AS avg_time_to_interactive;""" rows = ch.execute(query=ch_query, params={"project_id": project_id, "startTimestamp": startTimestamp, @@ -1320,7 +1274,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(pages.visually_complete) AS avg + COALESCE(avgOrNull(pages.visually_complete),0) AS value FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} 
WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1330,13 +1284,14 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(pages.visually_complete) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(pages.visually_complete),0) AS avg FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avg": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, - end_time=endTimestamp, density=density, - neutral={"avg": 0})} + return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, density=density, + neutral={"value": 0}), + "unit": schemas.TemplatePredefinedUnits.millisecond} def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1353,10 +1308,10 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(DISTINCT pages.session_id) AS count + COUNT(DISTINCT pages.session_id) AS count FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} - AND (pages.response_time)>(SELECT AVG(pages.response_time) + AND (pages.response_time)>(SELECT COALESCE(avgOrNull(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(sch_sub_query)})*2 GROUP BY timestamp @@ -1382,7 +1337,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(performance.avg_used_js_heap_size) AS avg_used_js_heap_size + COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS value FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1392,15 +1347,16 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(performance.avg_used_js_heap_size) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS avg FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avgUsedJsHeapSize": avg, + return {"value": avg, "chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"avg_used_js_heap_size": 0}))} + neutral={"value": 0})), + "unit": schemas.TemplatePredefinedUnits.memory} def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1413,7 
+1369,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(performance.avg_cpu) AS avg_cpu + COALESCE(avgOrNull(performance.avg_cpu),0) AS value FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1423,15 +1379,16 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(performance.avg_cpu) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_cpu),0) AS avg FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avgCpu": avg, + return {"value": avg, "chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"avg_cpu": 0}))} + neutral={"value": 0})), + "unit": schemas.TemplatePredefinedUnits.percentage} def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1444,7 +1401,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(performance.avg_fps) AS avg_fps + COALESCE(avgOrNull(performance.avg_fps),0) AS value FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1454,15 +1411,16 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(performance.avg_fps) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_fps),0) AS avg FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"avgFps": avg, + return {"value": avg, "chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, density=density, - neutral={"avg_fps": 0}))} + neutral={"value": 0})), + "unit": schemas.TemplatePredefinedUnits.frame} def __get_crashed_sessions_ids(project_id, startTimestamp, endTimestamp): @@ -1698,9 +1656,8 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - resources.url_host AS domain, - AVG(resources.duration) AS avg + ch_query = f"""SELECT resources.url_host AS domain, + COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY resources.url_host @@ -1710,7 +1667,7 @@ def get_slowest_domains(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) - ch_query = f"""SELECT AVG(resources.duration) AS avg + ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 @@ -1732,7 +1689,7 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1) WHERE {" AND ".join(ch_sub_query)} GROUP BY resources.url_host ORDER BY errors_count DESC - LIMIT 10;""" + LIMIT 5;""" rows = ch.execute(query=ch_query, params={"project_id": project_id, "startTimestamp": startTimestamp, @@ -1747,15 +1704,13 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=- ch_sub_query += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - b.user_browser AS browser, - b.count, - groupArray([bv.user_browser_version, toString(bv.count)]) AS versions + ch_query = f"""SELECT b.user_browser AS browser, + b.count, + groupArray([bv.user_browser_version, toString(bv.count)]) AS versions FROM ( - SELECT - sessions.user_browser, - COUNT(sessions.session_id) AS count + SELECT sessions.user_browser, + COUNT(sessions.session_id) AS count FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY sessions.user_browser @@ -1764,10 +1719,9 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=- ) AS b INNER JOIN ( - SELECT - sessions.user_browser, - sessions.user_browser_version, - COUNT(sessions.session_id) AS count + SELECT sessions.user_browser, + sessions.user_browser_version, + COUNT(sessions.session_id) AS count FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} GROUP BY @@ -1934,8 +1888,8 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d "endTimestamp": endTimestamp, **__get_constraint_values(args)} with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - COUNT(resources.session_id) AS total, - SUM(if(resources.type='fetch',1,0)) AS xhr + COUNT(resources.session_id) AS total, + SUM(if(resources.type='fetch',1,0)) AS xhr FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -1946,7 +1900,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d density=density, neutral={"total": 0, "xhr": 0}) ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, - AVG(pages.response_end) AS avg_response_end + COALESCE(avgOrNull(pages.response_end),0) AS avg_response_end FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart_response_end)} GROUP BY timestamp @@ -1969,8 +1923,8 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de with ch_client.ClickHouseClient() as ch: ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL 
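The reformatted get_sessions_per_browser query nests per-version counts under each browser via groupArray([user_browser_version, toString(count)]). A sketch of the resulting row shape and one way a consumer might re-type it (the sample data and the share computation are illustrative, not part of the diff):

    rows = [  # shape produced by groupArray([user_browser_version, toString(count)])
        {"browser": "Chrome",  "count": 120, "versions": [["103.0", "80"], ["102.0", "40"]]},
        {"browser": "Firefox", "count": 30,  "versions": [["101.0", "30"]]},
    ]
    # re-type the stringified counts and derive per-version shares
    for r in rows:
        r["versions"] = [{"version": v, "count": int(c),
                          "share": round(int(c) * 100 / r["count"], 1)}
                         for v, c in r["versions"]]
    print(rows[0]["versions"][0])  # {'version': '103.0', 'count': 80, 'share': 66.7}
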
%(step_size)s second)) * 1000 AS timestamp, - COUNT(DISTINCT errors.session_id) AS sessions_count, - COUNT(DISTINCT errors.error_id) AS errors_count + COUNT(DISTINCT errors.session_id) AS sessions_count, + COUNT(DISTINCT errors.error_id) AS errors_count FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY timestamp @@ -2008,15 +1962,13 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT - toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp, - AVG(NULLIF(s.count,0)) AS avg, - groupArray([toString(t.type), toString(t.xavg)]) AS types + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp, + COALESCE(avgOrNull(NULLIF(s.count,0)),0) AS avg, + groupArray([toString(t.type), toString(t.xavg)]) AS types FROM - ( SELECT - resources.session_id, - MIN(resources.datetime) AS base_datetime, - COUNT(resources.url) AS count + ( SELECT resources.session_id, + MIN(resources.datetime) AS base_datetime, + COUNT(resources.url) AS count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} GROUP BY resources.session_id @@ -2024,7 +1976,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de INNER JOIN (SELECT session_id, type, - AVG(NULLIF(count,0)) AS xavg + COALESCE(avgOrNull(NULLIF(count,0)),0) AS xavg FROM (SELECT resources.session_id, resources.type, COUNT(resources.url) AS count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} @@ -2057,8 +2009,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de return helper.list_to_camel_case( __merge_charts( [{"timestamp": i["timestamp"], "avgCountResources": i["avg"], "types": i["types"]} for i in resources], - [{"timestamp": i["timestamp"], "avgTimeToRender": i["avg"]} for i in - time_to_render["chart"]])) + [{"timestamp": i["timestamp"], "avgTimeToRender": i["value"]} for i in time_to_render["chart"]])) def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -2137,3 +2088,721 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1) density=density, neutral={"first_party": 0, "third_party": 0})) + + +def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with ch_client.ClickHouseClient() as ch: + row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = get_performance_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def 
__get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("pages.load_event_end>0") + ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + row = ch.execute(query=ch_query, params=params)[0] + result = row + for k in result: + if result[k] is None: + result[k] = 0 + return result + + +def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), + density=19, resources=None, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + location_constraints = [] + meta_condition = __get_meta_constraint(args) + + location_constraints_vals = {} + + if resources and len(resources) > 0: + for r in resources: + if r["type"] == "LOCATION": + location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s") + location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value'] + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, + data=args) + ch_sub_query_chart += meta_condition + ch_sub_query_chart.append("pages.load_event_end>0") + + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(pages.load_event_end),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""} + GROUP BY timestamp + ORDER BY timestamp;""" + + rows = ch.execute(query=ch_query, params={**params, **location_constraints_vals, **__get_constraint_values(args)}) + pages = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + + # for s in pages: + # for k in s: + # if s[k] is None: + # s[k] = 0 + return pages + + +def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with ch_client.ClickHouseClient() as ch: + row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = get_performance_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_application_activity_avg_image_load_time(ch, project_id, 
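Each new get_application_activity_* function computes a period-over-period progress figure by sliding the query window back by its own length and re-running the same aggregate. A sketch of that windowing, with an assumed formula standing in for helper.__progress (whose implementation is not part of this diff):

    def shift_window(start_ts, end_ts):
        # previous period of identical length, ending where the current one starts
        diff = end_ts - start_ts
        return start_ts - diff, start_ts

    def progress(old_val, new_val):
        # assumed stand-in for helper.__progress; the real formula is not shown here
        if old_val == 0:
            return 100 if new_val > 0 else 0
        return round((new_val - old_val) / old_val * 100, 2)

    prev_start, prev_end = shift_window(1_000_000, 1_600_000)
    print(prev_start, prev_end)                    # 400000 1000000
    print(progress(old_val=200.0, new_val=150.0))  # -25.0
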
startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="resources", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("resources.type= %(type)s") + ch_sub_query.append("resources.duration>0") + ch_query = f"""\ + SELECT COALESCE(avgOrNull(resources.duration),0) AS value + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + row = ch.execute(query=ch_query, + params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)})[0] + result = row + # for k in result: + # if result[k] is None: + # result[k] = 0 + return result + + +def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), + density=19, resources=None, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + img_constraints = [] + ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + img_constraints_vals = {} + + if resources and len(resources) > 0: + for r in resources: + if r["type"] == "IMG": + img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + ch_sub_query_chart.append("resources.duration>0") + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS value + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + AND resources.type = 'img' + {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **img_constraints_vals, **__get_constraint_values(args)}) + images = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + + # for s in images: + # for k in s: + # if s[k] is None: + # s[k] = 0 + return images + + +def get_application_activity_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with ch_client.ClickHouseClient() as ch: + row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + results = helper.dict_to_camel_case(row) + results["chart"] = get_performance_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args) + previous = helper.dict_to_camel_case(row) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_application_activity_avg_request_load_time(ch, project_id, 
startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="resources", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("resources.type= %(type)s") + ch_sub_query.append("resources.duration>0") + ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS value + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + row = ch.execute(query=ch_query, + params={"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)})[0] + result = row + # for k in result: + # if result[k] is None: + # result[k] = 0 + return result + + +def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), + density=19, resources=None, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + request_constraints = [] + ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + request_constraints_vals = {} + + if resources and len(resources) > 0: + for r in resources: + if r["type"] != "IMG" and r["type"] == "LOCATION": + request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s") + request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + ch_sub_query_chart.append("resources.duration>0") + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(resources.duration),0) AS value + FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + AND resources.type = 'fetch' + {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, + params={**params, **request_constraints_vals, **__get_constraint_values(args)}) + requests = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, density=density, + neutral={"value": 0}) + + # for s in requests: + # for k in s: + # if s[k] is None: + # s[k] = 0 + return requests + + +def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with ch_client.ClickHouseClient() as ch: + results = {} + rows = __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + results = helper.dict_to_camel_case(rows[0]) + results["chart"] = __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTimestamp, + endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + rows = __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + previous = helper.dict_to_camel_case(rows[0]) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = 
schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("pages.dom_content_loaded_event_end>0") + ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + return rows + + +def __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTimestamp, endTimestamp, density=19, + **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + ch_sub_query_chart.append("pages.dom_content_loaded_event_end>0") + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + + # for s in rows: + # for k in s: + # if s[k] is None: + # s[k] = 0 + return rows + + +def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + with ch_client.ClickHouseClient() as ch: + rows = __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + results = helper.dict_to_camel_case(rows[0]) + results["chart"] = __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTimestamp, + endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + rows = __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + previous = helper.dict_to_camel_case(rows[0]) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("pages.first_contentful_paint>0") + # changed dom_content_loaded_event_start to dom_content_loaded_event_end + ch_query = f"""\ + SELECT 
COALESCE(avgOrNull(pages.first_contentful_paint),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + return rows + + +def __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTimestamp, endTimestamp, density=20, + **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + ch_sub_query_chart.append("pages.first_contentful_paint>0") + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(pages.first_contentful_paint),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + return rows + + +def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + results = {} + + with ch_client.ClickHouseClient() as ch: + rows = __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + results = helper.dict_to_camel_case(rows[0]) + for key in results: + if isnan(results[key]): + results[key] = 0 + results["chart"] = __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, + endTimestamp, **args) + + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + rows = __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args) + + if len(rows) > 0: + previous = helper.dict_to_camel_case(rows[0]) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.count + return results + + +def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions.pages_count>0") + ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(sessions.pages_count)),0) AS value + FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + + rows = ch.execute(query=ch_query, params=params) + + return rows + + +def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args): + step_size = 
__get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + ch_sub_query_chart.append("sessions.pages_count>0") + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(sessions.pages_count),0) AS value + FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + return rows + + +def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), **args): + results = {} + + with ch_client.ClickHouseClient() as ch: + rows = __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args) + if len(rows) > 0: + results = helper.dict_to_camel_case(rows[0]) + for key in results: + if isnan(results[key]): + results[key] = 0 + results["chart"] = __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestamp, + endTimestamp, **args) + diff = endTimestamp - startTimestamp + endTimestamp = startTimestamp + startTimestamp = endTimestamp - diff + rows = __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args) + + if len(rows) > 0: + previous = helper.dict_to_camel_case(rows[0]) + results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args): + ch_sub_query = __get_basic_constraints(table_name="sessions", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("isNotNull(sessions.duration)") + ch_sub_query.append("sessions.duration>0") + + ch_query = f"""SELECT COALESCE(avgOrNull(sessions.duration),0) AS value + FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, + **__get_constraint_values(args)} + + rows = ch.execute(query=ch_query, params=params) + + return rows + + +def __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestamp, endTimestamp, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="sessions", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + ch_sub_query_chart.append("isNotNull(sessions.duration)") + ch_sub_query_chart.append("sessions.duration>0") + params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp} + + ch_query = 
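Every chart in this file passes through __complete_missing_steps with a neutral row template so sparse timestamp buckets become a dense series. Its implementation is not part of this diff; the following is a plausible reconstruction, consistent with how it is called:

    def complete_missing_steps(rows, start_time, end_time, density, neutral):
        # plausible sketch; the real helper is not shown in this diff
        step = (end_time - start_time) // density
        by_ts = {r["timestamp"]: r for r in rows}
        return [by_ts.get(start_time + i * step,
                          {"timestamp": start_time + i * step, **neutral})
                for i in range(density)]

    rows = [{"timestamp": 100, "value": 5.0}]
    print(complete_missing_steps(rows, start_time=0, end_time=400, density=4,
                                 neutral={"value": 0}))
    # [{'timestamp': 0, 'value': 0}, {'timestamp': 100, 'value': 5.0},
    #  {'timestamp': 200, 'value': 0}, {'timestamp': 300, 'value': 0}]
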
f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COALESCE(avgOrNull(sessions.duration),0) AS value + FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + return rows + + +def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = %(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + results = rows[0] + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COUNT(pages.response_time) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.response_time) AND pages.response_time>0 + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + results["chart"] = rows + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(results) + + +def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = %(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COUNT(pages.session_id) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND 
".join(ch_sub_query)};""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + result = rows[0] + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, + COUNT(pages.session_id) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)}) + rows = __complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, neutral={"value": 0}) + result["chart"] = rows + result["unit"] = schemas.TemplatePredefinedUnits.count + return helper.dict_to_camel_case(result) + + +def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = %(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COALESCE(avgOrNull(pages.first_paint),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0;""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + results = rows[0] + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COALESCE(avgOrNull(pages.first_paint),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.first_paint) AND pages.first_paint>0 + GROUP BY timestamp + ORDER BY timestamp;;""" + rows = ch.execute(query=ch_query, params=params) + results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, + neutral={"value": 0})) + + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(results) + + +def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=19, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = 
%(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + ch_sub_query.append("isNotNull(pages.dom_content_loaded_event_time)") + ch_sub_query.append("pages.dom_content_loaded_event_time>0") + ch_sub_query_chart.append("isNotNull(pages.dom_content_loaded_event_time)") + ch_sub_query_chart.append("pages.dom_content_loaded_event_time>0") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + results = helper.dict_to_camel_case(rows[0]) + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params=params) + results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, + neutral={"value": 0})) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return results + + +def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = %(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + ch_sub_query.append("isNotNull(pages.ttfb)") + ch_sub_query.append("pages.ttfb>0") + ch_sub_query_chart.append("isNotNull(pages.ttfb)") + ch_sub_query_chart.append("pages.ttfb>0") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COALESCE(avgOrNull(pages.ttfb),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + results = rows[0] + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COALESCE(avgOrNull(pages.ttfb),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params=params) + results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, + neutral={"value": 0})) + results["unit"] = 
schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(results) + + +def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1), + endTimestamp=TimeUTC.now(), value=None, density=20, **args): + step_size = __get_step_size(startTimestamp, endTimestamp, density) + ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True, data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query_chart += meta_condition + + ch_sub_query = __get_basic_constraints(table_name="pages", data=args) + ch_sub_query += meta_condition + + if value is not None: + ch_sub_query.append("pages.url_path = %(value)s") + ch_sub_query_chart.append("pages.url_path = %(value)s") + ch_sub_query.append("isNotNull(pages.time_to_interactive)") + ch_sub_query.append("pages.time_to_interactive >0") + ch_sub_query_chart.append("isNotNull(pages.time_to_interactive)") + ch_sub_query_chart.append("pages.time_to_interactive >0") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)};""" + params = {"step_size": step_size, "project_id": project_id, + "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, + "value": value, **__get_constraint_values(args)} + rows = ch.execute(query=ch_query, params=params) + results = rows[0] + ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp, + COALESCE(avgOrNull(pages.time_to_interactive),0) AS value + FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query_chart)} + GROUP BY timestamp + ORDER BY timestamp;""" + rows = ch.execute(query=ch_query, params=params) + results["chart"] = helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp, + end_time=endTimestamp, + density=density, + neutral={"value": 0})) + results["unit"] = schemas.TemplatePredefinedUnits.millisecond + return helper.dict_to_camel_case(results) diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index 75a3a31d0..6a06e8230 100644 --- a/ee/api/chalicelib/core/projects.py +++ b/ee/api/chalicelib/core/projects.py @@ -41,19 +41,8 @@ def __create(tenant_id, name): return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True) -def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False, - last_tracker_version=None, user_id=None): +def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, user_id=None): with pg_client.PostgresClient() as cur: - tracker_query = "" - if last_tracker_version is not None and len(last_tracker_version) > 0: - tracker_query = cur.mogrify( - """,(SELECT tracker_version FROM public.sessions - WHERE sessions.project_id = s.project_id - AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""", - {"version": last_tracker_version}).decode('UTF-8') - elif version: - tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version" - role_query = """INNER JOIN LATERAL (SELECT 1 FROM users INNER JOIN roles USING (role_id) @@ -66,11 +55,10 @@ def 
get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st cur.execute( cur.mogrify(f"""\ SELECT - s.project_id, s.name, s.project_key + s.project_id, s.name, s.project_key, s.save_request_payloads {',s.gdpr' if gdpr else ''} {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''} {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''} - {tracker_query} FROM public.projects AS s {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''} {role_query if user_id is not None else ""} @@ -82,49 +70,38 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st rows = cur.fetchall() if recording_state: project_ids = [f'({r["project_id"]})' for r in rows] - query = f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last - FROM (VALUES {",".join(project_ids)}) AS projects(project_id) - LEFT JOIN sessions USING (project_id) - GROUP BY project_id;""" - cur.execute( - query=query - ) + query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last + FROM (VALUES {",".join(project_ids)}) AS projects(project_id) + LEFT JOIN sessions USING (project_id) + WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s + GROUP BY project_id;""", + {"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)}) + + cur.execute(query=query) status = cur.fetchall() for r in rows: + r["status"] = "red" for s in status: if s["project_id"] == r["project_id"]: - if s["last"] < TimeUTC.now(-2): - r["status"] = "red" - elif s["last"] < TimeUTC.now(-1): + if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1): r["status"] = "yellow" - else: + elif s["last"] >= TimeUTC.now(-1): r["status"] = "green" break return helper.list_to_camel_case(rows) -def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None, version=False, - last_tracker_version=None): +def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None): with pg_client.PostgresClient() as cur: - tracker_query = "" - if last_tracker_version is not None and len(last_tracker_version) > 0: - tracker_query = cur.mogrify( - """,(SELECT tracker_version FROM public.sessions - WHERE sessions.project_id = s.project_id - AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""", - {"version": last_tracker_version}).decode('UTF-8') - elif version: - tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version" - query = cur.mogrify(f"""\ SELECT s.project_id, s.project_key, - s.name + s.name, + s.save_request_payloads {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""} {',s.gdpr' if include_gdpr else ''} - {tracker_query} FROM public.projects AS s where s.tenant_id =%(tenant_id)s AND s.project_id =%(project_id)s @@ -187,6 +164,17 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): changes={"name": data.name})} +def count_by_tenant(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""\ + SELECT + count(s.project_id) + FROM public.projects AS s + WHERE s.deleted_at IS NULL + AND tenant_id= 
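
The rewritten recording_state block above changes two things: the last-session scan is now built with cur.mogrify() and bounded to a recent window instead of scanning all sessions, and every project is preset to "red", which matters because the WHERE clause on the LEFT JOIN drops projects with no sessions in that window from the status rows. A hedged restatement of the resulting traffic-light rule, with the TimeUTC.now(-1) and TimeUTC.now(-2) cutoffs passed in as plain millisecond arguments:

# Sketch of the status rule above; cutoffs are millisecond timestamps.
def capture_status(last_session_ts, yellow_cutoff, green_cutoff):
    if last_session_ts >= green_cutoff:
        return "green"   # a session was recorded after the green cutoff
    if last_session_ts >= yellow_cutoff:
        return "yellow"  # recorded recently, but before the green cutoff
    return "red"         # nothing recent enough; also the preset default

assert capture_status(90, yellow_cutoff=50, green_cutoff=80) == "green"
assert capture_status(60, yellow_cutoff=50, green_cutoff=80) == "yellow"
assert capture_status(10, yellow_cutoff=50, green_cutoff=80) == "red"
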
%(tenant_id)s;""", {"tenant_id": tenant_id})) + return cur.fetchone()["count"] + + def delete(tenant_id, user_id, project_id): admin = users.get(user_id=user_id, tenant_id=tenant_id) @@ -257,7 +245,8 @@ def get_project_key(project_id): where project_id =%(project_id)s AND deleted_at ISNULL;""", {"project_id": project_id}) ) - return cur.fetchone()["project_key"] + project = cur.fetchone() + return project["project_key"] if project is not None else None def get_capture_status(project_id): @@ -324,7 +313,7 @@ def is_authorized_batch(project_ids, tenant_id): query = cur.mogrify("""\ SELECT project_id FROM public.projects - where tenant_id =%(tenant_id)s + WHERE tenant_id =%(tenant_id)s AND project_id IN %(project_ids)s AND deleted_at IS NULL;""", {"tenant_id": tenant_id, "project_ids": tuple(project_ids)}) @@ -334,3 +323,13 @@ def is_authorized_batch(project_ids, tenant_id): ) rows = cur.fetchall() return [r["project_id"] for r in rows] + + +def get_projects_ids(tenant_id): + with pg_client.PostgresClient() as cur: + cur.execute(cur.mogrify("""SELECT s.project_id + FROM public.projects AS s + WHERE tenant_id =%(tenant_id)s AND s.deleted_at IS NULL + ORDER BY s.project_id;""", {"tenant_id": tenant_id})) + rows = cur.fetchall() + return [r["project_id"] for r in rows] diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py index 332d3709a..4e4f1c4e8 100644 --- a/ee/api/chalicelib/core/resources.py +++ b/ee/api/chalicelib/core/resources.py @@ -3,14 +3,14 @@ from chalicelib.utils import ch_client from chalicelib.utils.TimeUTC import TimeUTC -def get_by_session_id(session_id): +def get_by_session_id(session_id, project_id): with ch_client.ClickHouseClient() as ch: ch_query = """\ SELECT datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status FROM resources - WHERE session_id = toUInt64(%(session_id)s);""" - params = {"session_id": session_id} + WHERE session_id = toUInt64(%(session_id)s) AND project_id=%(project_id)s;""" + params = {"session_id": session_id, "project_id": project_id} rows = ch.execute(query=ch_query, params=params) results = [] for r in rows: diff --git a/ee/api/chalicelib/core/telemetry.py b/ee/api/chalicelib/core/telemetry.py index d9843e37d..9c82290fb 100644 --- a/ee/api/chalicelib/core/telemetry.py +++ b/ee/api/chalicelib/core/telemetry.py @@ -50,10 +50,12 @@ def compute(): FROM public.tenants ) AS all_tenants WHERE tenants.tenant_id = all_tenants.tenant_id - RETURNING *,(SELECT email FROM users_ee WHERE role = 'owner' AND users_ee.tenant_id = tenants.tenant_id LIMIT 1);""" + RETURNING name,t_integrations,t_projects,t_sessions,t_users,user_id,opt_out, + (SELECT openreplay_version()) AS version_number, + (SELECT email FROM public.users WHERE role = 'owner' AND users.tenant_id=tenants.tenant_id LIMIT 1);""" ) data = cur.fetchall() - requests.post('https://parrot.asayer.io/os/telemetry', + requests.post('https://api.openreplay.com/os/telemetry', json={"stats": [process_data(d, edition='ee') for d in data]}) @@ -65,4 +67,4 @@ def new_client(tenant_id): FROM public.tenants WHERE tenant_id=%(tenant_id)s;""", {"tenant_id": tenant_id})) data = cur.fetchone() - requests.post('https://parrot.asayer.io/os/signup', json=process_data(data, edition='ee')) \ No newline at end of file + requests.post('https://api.openreplay.com/os/signup', json=process_data(data, edition='ee')) \ No newline at end of file diff --git a/ee/api/chalicelib/core/users.py 
b/ee/api/chalicelib/core/users.py
index 9ca77c1ea..b70f6a269 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -632,7 +632,6 @@ def change_jwt_iat(user_id):
         return cur.fetchone().get("jwt_iat")
 
 
-@dev.timed
 def authenticate(email, password, for_change_password=False, for_plugin=False):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py
index babdd669a..aa45699f7 100644
--- a/ee/api/chalicelib/utils/ch_client.py
+++ b/ee/api/chalicelib/utils/ch_client.py
@@ -25,5 +25,8 @@ class ClickHouseClient:
     def client(self):
         return self.__client
 
+    def format(self, query, params):
+        return self.__client.substitute_params(query, params)
+
     def __exit__(self, *args):
         pass
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index 84a372567..5909d31c1 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -4,11 +4,11 @@ boto3==1.16.1
 pyjwt==1.7.1
 psycopg2-binary==2.8.6
 elasticsearch==7.9.1
-jira==2.0.0
+jira==3.1.1
 clickhouse-driver==0.2.2
 python3-saml==1.12.0
-fastapi==0.74.1
+fastapi==0.75.0
 python-multipart==0.0.5
 uvicorn[standard]==0.17.5
 python-decouple==3.6
diff --git a/ee/api/routers/base.py b/ee/api/routers/base.py
deleted file mode 100644
index 5c665b2d1..000000000
--- a/ee/api/routers/base.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from fastapi import APIRouter, Depends
-
-from auth.auth_apikey import APIKeyAuth
-from auth.auth_jwt import JWTAuth
-from auth.auth_project import ProjectAuthorizer
-from or_dependencies import ORRoute
-
-
-def get_routers() -> (APIRouter, APIRouter, APIRouter):
-    public_app = APIRouter(route_class=ORRoute)
-    app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))], route_class=ORRoute)
-    app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth()), Depends(ProjectAuthorizer("projectKey"))],
-                           route_class=ORRoute)
-    return public_app, app, app_apikey
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index 52fc737e4..31ed1d099 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -56,8 +56,6 @@ def login(data: schemas.UserLoginSchema = Body(...)):
     c = tenants.get_by_tenant_id(tenant_id)
     c.pop("createdAt")
-    c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
-                                          stack_integrations=True, version=True, user_id=r["id"])
     c["smtp"] = helper.has_smtp()
     c["iceServers"] = assist.get_ice_servers()
     r["smtp"] = c["smtp"]
@@ -99,10 +97,9 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
 
 
 @app.get('/projects/{projectId}', tags=['projects'])
-def get_project(projectId: int, last_tracker_version: Optional[str] = None,
-                context: schemas.CurrentContext = Depends(OR_context)):
+def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
     data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
-                                include_gdpr=True, last_tracker_version=last_tracker_version)
+                                include_gdpr=True)
     if data is None:
         return {"errors": ["project not found"]}
     return {"data": data}
@@ -229,15 +226,13 @@ def get_client(context: schemas.CurrentContext = Depends(OR_context)):
     r = tenants.get_by_tenant_id(context.tenant_id)
     if r is not None:
         r.pop("createdAt")
-        r["projects"] = projects.get_projects(tenant_id=context.tenant_id, recording_state=True, recorded=True,
-                                              stack_integrations=True, version=True, user_id=context.user_id)
+
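
The new ClickHouseClient.format() helper above is a thin wrapper over clickhouse-driver's substitute_params(), returning the query text with its %(...)s placeholders already rendered, which is handy for logging a final query or embedding one as a subquery. A hedged usage sketch (the project and values are illustrative):

from chalicelib.utils import ch_client

# Illustrative only: render a parameterized query to its final text.
with ch_client.ClickHouseClient() as ch:
    q = ch.format(query="SELECT count() FROM pages WHERE project_id = %(project_id)s",
                  params={"project_id": 42})
    print(q)  # -> SELECT count() FROM pages WHERE project_id = 42
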
return { 'data': r } @app.get('/projects', tags=['projects']) -def get_projects(last_tracker_version: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)): +def get_projects(context: schemas.CurrentContext = Depends(OR_context)): return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, - stack_integrations=True, version=True, - last_tracker_version=last_tracker_version, user_id=context.user_id)} + stack_integrations=True, user_id=context.user_id)} diff --git a/ee/api/routers/app/v1_api_ee.py b/ee/api/routers/subs/v1_api_ee.py similarity index 100% rename from ee/api/routers/app/v1_api_ee.py rename to ee/api/routers/subs/v1_api_ee.py diff --git a/ee/backend/pkg/db/clickhouse/messages_ios.go b/ee/backend/pkg/db/clickhouse/messages-ios.go similarity index 100% rename from ee/backend/pkg/db/clickhouse/messages_ios.go rename to ee/backend/pkg/db/clickhouse/messages-ios.go diff --git a/ee/backend/pkg/db/clickhouse/messages_web.go b/ee/backend/pkg/db/clickhouse/messages-web.go similarity index 100% rename from ee/backend/pkg/db/clickhouse/messages_web.go rename to ee/backend/pkg/db/clickhouse/messages-web.go diff --git a/ee/backend/pkg/db/postgres/alert.go b/ee/backend/pkg/db/postgres/alert.go deleted file mode 100644 index 301d7b540..000000000 --- a/ee/backend/pkg/db/postgres/alert.go +++ /dev/null @@ -1,228 +0,0 @@ -package postgres - -import ( - "database/sql" - "errors" - "fmt" - sq "github.com/Masterminds/squirrel" - "log" - "strconv" - "time" -) - -type TimeString sql.NullString -type query struct { - Left string `db:"query.left" json:"left"` - Operator string `db:"query.operator" json:"operator"` - Right float64 `db:"query.right" json:"right"` -} -type options struct { - RenotifyInterval int64 `db:"options.renotifyInterval" json:"renotifyInterval"` - LastNotification int64 `db:"options.lastNotification" json:"lastNotification;omitempty"` - CurrentPeriod int64 `db:"options.currentPeriod" json:"currentPeriod"` - PreviousPeriod int64 `db:"options.previousPeriod" json:"previousPeriod;omitempty"` - Message []map[string]string `db:"options.message" json:"message;omitempty"` - Change string `db:"options.change" json:"change;omitempty"` -} -type Alert struct { - AlertID uint32 `db:"alert_id" json:"alert_id"` - ProjectID uint32 `db:"project_id" json:"project_id"` - Name string `db:"name" json:"name"` - Description sql.NullString `db:"description" json:"description"` - Active bool `db:"active" json:"active"` - DetectionMethod string `db:"detection_method" json:"detection_method"` - Query query `db:"query" json:"query"` - DeletedAt *int64 `db:"deleted_at" json:"deleted_at"` - CreatedAt *int64 `db:"created_at" json:"created_at"` - Options options `db:"options" json:"options"` - TenantId uint32 `db:"tenant_id" json:"tenant_id"` -} - -func (pg *Conn) IterateAlerts(iter func(alert *Alert, err error)) error { - rows, err := pg.query(` - SELECT - alerts.alert_id, - alerts.project_id, - alerts.name, - alerts.description, - alerts.active, - alerts.detection_method, - alerts.query, - CAST(EXTRACT(epoch FROM alerts.deleted_at) * 1000 AS BIGINT) AS deleted_at, - CAST(EXTRACT(epoch FROM alerts.created_at) * 1000 AS BIGINT) AS created_at, - alerts.options, - projects.tenant_id - FROM public.alerts INNER JOIN public.projects USING(project_id) - WHERE alerts.active AND alerts.deleted_at ISNULL; - `) - if err != nil { - return err - } - defer rows.Close() - for rows.Next() { - a := new(Alert) - if err = rows.Scan( - &a.AlertID, - 
&a.ProjectID, - &a.Name, - &a.Description, - &a.Active, - &a.DetectionMethod, - &a.Query, - &a.DeletedAt, - &a.CreatedAt, - &a.Options, - &a.TenantId, - ); err != nil { - iter(nil, err) - continue - } - iter(a, nil) - } - - if err = rows.Err(); err != nil { - return err - } - return nil -} - -func (pg *Conn) SaveLastNotification(allIds []uint32) error { - var paramrefs string - for _, v := range allIds { - paramrefs += strconv.Itoa(int(v)) + `,` - } - paramrefs = paramrefs[:len(paramrefs)-1] // remove last "," - q := "UPDATE public.Alerts SET options = options||'{\"lastNotification\":" + strconv.Itoa(int(time.Now().Unix()*1000)) + "}'::jsonb WHERE alert_id IN (" + paramrefs + ");" - //log.Println(q) - log.Println("Updating PG") - return pg.exec(q) -} - -type columnDefinition struct { - table string - formula string - condition string - group string -} - -var LeftToDb = map[string]columnDefinition{ - "performance.dom_content_loaded.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"}, - "performance.first_meaningful_paint.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"}, - "performance.page_load_time.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(load_time ,0))"}, - "performance.dom_build_time.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(dom_building_time,0))"}, - "performance.speed_index.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(speed_index,0))"}, - "performance.page_response_time.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(response_time,0))"}, - "performance.ttfb.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(first_paint_time,0))"}, - "performance.time_to_render.average": {table: "events.pages INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(visually_complete,0))"}, - "performance.image_load_time.average": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))", condition: "type='img'"}, - "performance.request_load_time.average": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))", condition: "type='fetch'"}, - "resources.load_time.average": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "AVG(NULLIF(resources.duration,0))"}, - "resources.missing.count": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(DISTINCT url_hostpath)", condition: "success= FALSE"}, - "errors.4xx_5xx.count": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100!=2"}, - "errors.4xx.count": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100=4"}, - "errors.5xx.count": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(session_id)", condition: "status/100=5"}, - "errors.javascript.impacted_sessions.count": {table: "events.resources INNER JOIN public.sessions USING(session_id)", formula: "COUNT(DISTINCT session_id)", condition: "success= FALSE AND type='script'"}, - 
"performance.crashes.count": {table: "(SELECT *, start_ts AS timestamp FROM public.sessions WHERE errors_count > 0) AS sessions", formula: "COUNT(DISTINCT session_id)", condition: "errors_count > 0"}, - "errors.javascript.count": {table: "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", formula: "COUNT(DISTINCT session_id)", condition: "source='js_exception'"}, - "errors.backend.count": {table: "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", formula: "COUNT(DISTINCT session_id)", condition: "source!='js_exception'"}, -} - -//This is the frequency of execution for each threshold -var TimeInterval = map[int64]int64{ - 15: 3, - 30: 5, - 60: 10, - 120: 20, - 240: 30, - 1440: 60, -} - -func (a *Alert) CanCheck() bool { - now := time.Now().Unix() * 1000 - var repetitionBase int64 - - if repetitionBase = a.Options.CurrentPeriod; a.DetectionMethod == "change" && a.Options.CurrentPeriod > a.Options.PreviousPeriod { - repetitionBase = a.Options.PreviousPeriod - } - - if _, ok := TimeInterval[repetitionBase]; !ok { - log.Printf("repetitionBase: %d NOT FOUND", repetitionBase) - return false - } - return a.DeletedAt == nil && a.Active && - (a.Options.RenotifyInterval <= 0 || - a.Options.LastNotification <= 0 || - ((now - a.Options.LastNotification) > a.Options.RenotifyInterval*60*1000)) && - ((now-*a.CreatedAt)%(TimeInterval[repetitionBase]*60*1000)) < 60*1000 -} - -func (a *Alert) Build() (sq.SelectBuilder, error) { - colDef, ok := LeftToDb[a.Query.Left] - if !ok { - return sq.Select(), errors.New(fmt.Sprintf("!! unsupported metric '%s' from alert: %d:%s\n", a.Query.Left, a.AlertID, a.Name)) - } - - subQ := sq. - Select(colDef.formula + " AS value"). - From(colDef.table). - Where(sq.And{sq.Expr("project_id = $1 ", a.ProjectID), - sq.Expr(colDef.condition)}) - q := sq.Select(fmt.Sprint("value, coalesce(value,0)", a.Query.Operator, a.Query.Right, " AS valid")) - if len(colDef.group) > 0 { - subQ = subQ.Column(colDef.group + " AS group_value") - subQ = subQ.GroupBy(colDef.group) - q = q.Column("group_value") - } - - if a.DetectionMethod == "threshold" { - q = q.FromSelect(subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)), "stat") - } else if a.DetectionMethod == "change" { - if a.Options.Change == "change" { - if len(colDef.group) == 0 { - sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)).ToSql() - sub2, args2, _ := subQ.Where( - sq.And{ - sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60), - sq.Expr("timestamp>=$4 ", time.Now().Unix()-2*a.Options.CurrentPeriod*60), - }).ToSql() - sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")-(" + sub2 + ")) AS value").ToSql() - q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...) - } else { - subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)) - sub2, args2, _ := subQ.Where( - sq.And{ - sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60), - sq.Expr("timestamp>=$4 ", time.Now().Unix()-2*a.Options.CurrentPeriod*60), - }).ToSql() - sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...) 
- q = q.FromSelect(sub1, "stat") - } - } else if a.Options.Change == "percent" { - if len(colDef.group) == 0 { - sub1, args1, _ := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)).ToSql() - sub2, args2, _ := subQ.Where( - sq.And{ - sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60), - sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod*60-a.Options.CurrentPeriod*60), - }).ToSql() - sub1, _, _ = sq.Expr("SELECT ((" + sub1 + ")/(" + sub2 + ")-1)*100 AS value").ToSql() - q = q.JoinClause("FROM ("+sub1+") AS stat", append(args1, args2...)...) - } else { - subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod*60)) - sub2, args2, _ := subQ.Where( - sq.And{ - sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod*60), - sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod*60-a.Options.CurrentPeriod*60), - }).ToSql() - sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...) - q = q.FromSelect(sub1, "stat") - } - } else { - return q, errors.New("unsupported change method") - } - - } else { - return q, errors.New("unsupported detection method") - } - return q, nil -} \ No newline at end of file diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go index ca0544923..65d2cd830 100644 --- a/ee/backend/pkg/kafka/consumer.go +++ b/ee/backend/pkg/kafka/consumer.go @@ -25,7 +25,12 @@ type Consumer struct { lastKafkaEventTs int64 } -func NewConsumer(group string, topics []string, messageHandler types.MessageHandler) *Consumer { +func NewConsumer( + group string, + topics []string, + messageHandler types.MessageHandler, + autoCommit bool, +) *Consumer { protocol := "plaintext" if env.Bool("KAFKA_USE_SSL") { protocol = "ssl" @@ -37,6 +42,7 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand "enable.auto.commit": "false", "security.protocol": protocol, "go.application.rebalance.enable": true, + "max.poll.interval.ms": env.Int("KAFKA_MAX_POLL_INTERVAL_MS"), }) if err != nil { log.Fatalln(err) @@ -53,18 +59,19 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand log.Fatalln(err) } + var commitTicker *time.Ticker + if autoCommit { + commitTicker = time.NewTicker(2 * time.Minute) + } + return &Consumer{ c: c, messageHandler: messageHandler, - commitTicker: time.NewTicker(2 * time.Minute), + commitTicker: commitTicker, pollTimeout: 200, } } -func (consumer *Consumer) DisableAutoCommit() { - consumer.commitTicker.Stop() -} - func (consumer *Consumer) Commit() error { consumer.c.Commit() // TODO: return error if it is not "No offset stored" return nil @@ -128,10 +135,12 @@ func (consumer *Consumer) ConsumeNext() error { return nil } - select { - case <-consumer.commitTicker.C: - consumer.Commit() - default: + if consumer.commitTicker != nil { + select { + case <-consumer.commitTicker.C: + consumer.Commit() + default: + } } switch e := ev.(type) { @@ -139,7 +148,7 @@ func (consumer *Consumer) ConsumeNext() error { if e.TopicPartition.Error != nil { return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error") } - ts := e.Timestamp.UnixNano() / 1e6 + ts := e.Timestamp.UnixMilli() consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{ Topic: *(e.TopicPartition.Topic), ID: uint64(e.TopicPartition.Offset), diff --git a/ee/backend/pkg/license/check.go 
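
The consumer change above replaces the always-on commit ticker (and its DisableAutoCommit() escape hatch) with an autoCommit constructor flag: the ticker exists only when requested, and the poll loop commits only when a ticker exists and has fired. A hedged Python sketch of the same pattern, with illustrative names rather than the project's API:

import time

class PeriodicCommitter:
    """Commit offsets at most once per interval, only if enabled."""
    def __init__(self, auto_commit, interval_s=120):  # 120s mirrors the 2m ticker
        self._next = time.monotonic() + interval_s if auto_commit else None
        self._interval = interval_s

    def maybe_commit(self, commit):
        if self._next is not None and time.monotonic() >= self._next:
            commit()
            self._next = time.monotonic() + self._interval

# With auto_commit=False, maybe_commit() is a no-op and the caller owns
# commit placement, which is the same contract as the Go flag above.
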
b/ee/backend/pkg/license/check.go
index 6b33a625e..771558946 100644
--- a/ee/backend/pkg/license/check.go
+++ b/ee/backend/pkg/license/check.go
@@ -33,7 +33,7 @@ func CheckLicense() {
 		log.Fatal("Can not form a license check request.")
 	}
 
-	resp, err := http.Post("https://parrot.asayer.io/os/license", "application/json", bytes.NewReader(requestBody))
+	resp, err := http.Post("https://api.openreplay.com/os/license", "application/json", bytes.NewReader(requestBody))
 	if err != nil {
 		log.Fatalf("Error while checking license. %v", err)
 	}
diff --git a/ee/backend/pkg/queue/import.go b/ee/backend/pkg/queue/import.go
index abff07e9a..e95eb11e5 100644
--- a/ee/backend/pkg/queue/import.go
+++ b/ee/backend/pkg/queue/import.go
@@ -2,17 +2,16 @@ package queue
 
 import (
 	"openreplay/backend/pkg/kafka"
-	"openreplay/backend/pkg/queue/types"
 	"openreplay/backend/pkg/license"
+	"openreplay/backend/pkg/queue/types"
 )
 
-func NewConsumer(group string, topics []string, handler types.MessageHandler) types.Consumer {
+func NewConsumer(group string, topics []string, handler types.MessageHandler, autoCommit bool) types.Consumer {
 	license.CheckLicense()
-	return kafka.NewConsumer(group, topics, handler)
+	return kafka.NewConsumer(group, topics, handler, autoCommit)
 }
 
 func NewProducer() types.Producer {
 	license.CheckLicense()
 	return kafka.NewProducer()
 }
-
diff --git a/ee/connectors/db/writer.py b/ee/connectors/db/writer.py
index b999b773f..42ebc6f39 100644
--- a/ee/connectors/db/writer.py
+++ b/ee/connectors/db/writer.py
@@ -1,4 +1,5 @@
 import os
+
 DATABASE = os.environ['DATABASE_NAME']
 
 from db.api import DBConnection
@@ -7,16 +8,17 @@ from db.tables import *
 
 if DATABASE == 'redshift':
     from db.loaders.redshift_loader import transit_insert_to_redshift
-if DATABASE == 'clickhouse':
+elif DATABASE == 'clickhouse':
     from db.loaders.clickhouse_loader import insert_to_clickhouse
-if DATABASE == 'pg':
+elif DATABASE == 'pg':
     from db.loaders.postgres_loader import insert_to_postgres
-if DATABASE == 'bigquery':
+elif DATABASE == 'bigquery':
     from db.loaders.bigquery_loader import insert_to_bigquery
     from bigquery_utils.create_table import create_tables_bigquery
-if DATABASE == 'snowflake':
+elif DATABASE == 'snowflake':
     from db.loaders.snowflake_loader import insert_to_snowflake
-
+else:
+    raise Exception(f"{DATABASE}-database not supported")
 
 # create tables if don't exist
 try:
@@ -35,12 +37,11 @@ try:
         db = None
 except Exception as e:
     print(repr(e))
-    print("Please create the tables with scripts provided in "
-          "'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'")
+    print("Please create the tables with scripts provided in " +
+          f"'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'")
 
 
 def insert_batch(db: DBConnection, batch, table, level='normal'):
-
     if len(batch) == 0:
         return
     df = get_df_from_batch(batch, level=level)
@@ -60,4 +61,3 @@ def insert_batch(db: DBConnection, batch, table, level='normal'):
 
     if db.config == 'snowflake':
         insert_to_snowflake(db=db, df=df, table=table)
-
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql
new file mode 100644
index 000000000..d7eeff911
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.6.0/1.6.0.sql
@@ -0,0 +1,348 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT 'v1.6.0-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+
+CREATE TABLE IF NOT EXISTS dashboards
+(
+    dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+    project_id   integer NOT NULL
REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + name text NOT NULL, + is_public boolean NOT NULL DEFAULT TRUE, + is_pinned boolean NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL +); + + +ALTER TABLE IF EXISTS metrics + DROP CONSTRAINT IF EXISTS null_project_id_for_template_only, + DROP CONSTRAINT IF EXISTS unique_key; + +ALTER TABLE IF EXISTS metrics + ADD COLUMN IF NOT EXISTS edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + ADD COLUMN IF NOT EXISTS is_pinned boolean NOT NULL DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS category text NULL DEFAULT 'custom', + ADD COLUMN IF NOT EXISTS is_predefined boolean NOT NULL DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS is_template boolean NOT NULL DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS predefined_key text NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS default_config jsonb NOT NULL DEFAULT '{ + "col": 2, + "row": 2, + "position": 0 + }'::jsonb, + ALTER COLUMN project_id DROP NOT NULL, + ADD CONSTRAINT null_project_id_for_template_only + CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ), + ADD CONSTRAINT unique_key UNIQUE (predefined_key); + + + +CREATE TABLE IF NOT EXISTS dashboard_widgets +( + widget_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + dashboard_id integer NOT NULL REFERENCES dashboards (dashboard_id) ON DELETE CASCADE, + metric_id integer NOT NULL REFERENCES metrics (metric_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + config jsonb NOT NULL DEFAULT '{}'::jsonb +); + +ALTER TABLE events_common.requests + ADD COLUMN IF NOT EXISTS host text NULL, + ADD COLUMN IF NOT EXISTS path text NULL, + ADD COLUMN IF NOT EXISTS query text NULL; + +ALTER TABLE events.pages + ADD COLUMN IF NOT EXISTS query text NULL; + +DO +$$ + BEGIN + IF EXISTS(SELECT * + FROM information_schema.columns + WHERE table_schema = 'events' + AND table_name = 'pages' + AND column_name = 'base_path') + THEN + ALTER TABLE events.pages + DROP COLUMN IF EXISTS path; + ALTER TABLE events.pages + RENAME COLUMN base_path TO path; + DROP INDEX IF EXISTS events.pages_base_path_gin_idx2; + DROP INDEX IF EXISTS pages_base_path_idx2; + ALTER INDEX IF EXISTS events.pages_base_path_gin_idx RENAME TO pages_path_gin_idx; + ALTER INDEX IF EXISTS events.pages_base_path_idx RENAME TO pages_path_idx; + ALTER INDEX IF EXISTS events.pages_base_path_session_id_timestamp_idx RENAME TO pages_path_session_id_timestamp_idx; + ALTER INDEX IF EXISTS events.pages_base_path_base_pathLNGT2_idx RENAME TO pages_path_pathLNGT2_idx; + END IF; + END +$$; + +COMMIT; + +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'areaChart'; +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'barChart'; +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'stackedBarChart'; +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'stackedBarLineChart'; +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'overview'; +ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'map'; +ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined'; + + +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 +}', true, true, true, 
'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. of Visited Pages', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview'), + + ('Sessions Affected by JS Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'), + ('Top Domains with 4xx Fetch Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'), + ('Top Domains with 5xx Fetch Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'), + ('Errors per Domain', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 
'errors_per_domains', 'predefined', 'table'), + ('Fetch Calls with Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'calls_errors', 'predefined', 'table'), + ('Errors by Type', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'errors_per_type', 'predefined', 'barChart'), + ('Errors by Origin', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'), + + ('Speed Index by Location', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'speed_location', 'predefined', 'map'), + ('Slowest Domains', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'slowest_domains', 'predefined', 'table'), + ('Sessions per Browser', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'sessions_per_browser', 'predefined', 'table'), + ('Time To Render', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'time_to_render', 'predefined', 'areaChart'), + ('Sessions Impacted by Slow Pages', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'), + ('Memory Consumption', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'memory_consumption', 'predefined', 'areaChart'), + ('CPU Load', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'cpu', 'predefined', 'areaChart'), + ('Frame Rate', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'fps', 'predefined', 'areaChart'), + ('Crashes', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'crashes', 'predefined', 'areaChart'), + ('Resources Loaded vs Visually Complete', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'), + ('DOM Build Time', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'), + ('Pages Response Time', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'pages_response_time', 'predefined', 'areaChart'), + ('Pages Response Time Distribution', 'performance', '{ + "col": 4, + "row": 2, + "position": 0 + }', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'), + + ('Missing Resources', 'resources', '{ + "col": 4, + "row": 2, + "position": 0 + }', true, true, true, 'missing_resources', 'predefined', 'table'), + ('Slowest Resources', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'slowest_resources', 'predefined', 'table'), + ('Resources Fetch Time', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_loading_time', 'predefined', 'table'), + ('Resource Loaded vs Response End', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'), + ('Breakdown of Loaded Resources', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + 
is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL; +CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL; + +CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp); +CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 9adab50e0..461a414fc 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.5.4-ee' +SELECT 'v1.6.0-ee' $$ LANGUAGE sql IMMUTABLE; @@ -100,36 +100,36 @@ $$ LANGUAGE plpgsql; DO $$ BEGIN - IF (with to_check (name) as ( - values ('alerts'), - ('announcements'), - ('assigned_sessions'), - ('autocomplete'), - ('basic_authentication'), - ('errors'), - ('funnels'), - ('integrations'), - ('issues'), - ('jira_cloud'), - ('jobs'), - ('metric_series'), - ('metrics'), - ('notifications'), - ('oauth_authentication'), - ('projects'), - ('roles'), - ('roles_projects'), - ('searches'), - ('sessions'), - ('tenants'), - ('traces'), - ('user_favorite_errors'), - ('user_favorite_sessions'), - ('user_viewed_errors'), - ('user_viewed_sessions'), - ('users'), - ('webhooks') - ) + IF (with to_check (name) as (values ('alerts'), + ('announcements'), + ('assigned_sessions'), + ('autocomplete'), + ('basic_authentication'), + ('dashboards'), + ('dashboard_widgets'), + ('errors'), + ('funnels'), + ('integrations'), + ('issues'), + ('jira_cloud'), + ('jobs'), + ('metric_series'), + ('metrics'), + ('notifications'), + ('oauth_authentication'), + ('projects'), + ('roles'), + ('roles_projects'), + ('searches'), + ('sessions'), + ('tenants'), + ('traces'), + ('user_favorite_errors'), + ('user_favorite_sessions'), + ('user_viewed_errors'), + ('user_viewed_sessions'), + ('users'), + ('webhooks')) select bool_and(exists(select * from information_schema.tables t where table_schema = 'public' @@ -786,23 +786,37 @@ $$ CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id); CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id); - CREATE TYPE metric_type AS ENUM ('timeseries','table'); - CREATE TYPE metric_view_type AS ENUM 
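
One detail worth noting in the 1.6.0 migration above: it issues COMMIT before the ALTER TYPE ... ADD VALUE statements and the trailing CREATE INDEX CONCURRENTLY statements, because CREATE INDEX CONCURRENTLY cannot run inside a transaction block (and ALTER TYPE ... ADD VALUE could not before PostgreSQL 12). A sketch of honoring the same constraint from Python with psycopg2 (the DSN is illustrative; the index is one of those created above):

import psycopg2

conn = psycopg2.connect("dbname=postgres")  # illustrative DSN
conn.autocommit = True  # CONCURRENTLY refuses to run inside a transaction
with conn.cursor() as cur:
    cur.execute("""CREATE INDEX CONCURRENTLY IF NOT EXISTS pages_query_nn_idx
                       ON events.pages (query) WHERE query IS NOT NULL;""")
conn.close()
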
('lineChart','progress','table','pieChart'); + CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined'); + CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map'); CREATE TABLE IF NOT EXISTS metrics ( - metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer REFERENCES users (user_id) ON DELETE SET NULL, - name text NOT NULL, - is_public boolean NOT NULL DEFAULT FALSE, - active boolean NOT NULL DEFAULT TRUE, - created_at timestamp DEFAULT timezone('utc'::text, now()) not null, - deleted_at timestamp, - metric_type metric_type NOT NULL DEFAULT 'timeseries', - view_type metric_view_type NOT NULL DEFAULT 'lineChart', - metric_of text NOT NULL DEFAULT 'sessionCount', - metric_value text[] NOT NULL DEFAULT '{}'::text[], - metric_format text + metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer REFERENCES users (user_id) ON DELETE SET NULL, + name text NOT NULL, + is_public boolean NOT NULL DEFAULT FALSE, + active boolean NOT NULL DEFAULT TRUE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp, + edited_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + metric_type metric_type NOT NULL DEFAULT 'timeseries', + view_type metric_view_type NOT NULL DEFAULT 'lineChart', + metric_of text NOT NULL DEFAULT 'sessionCount', + metric_value text[] NOT NULL DEFAULT '{}'::text[], + metric_format text, + category text NULL DEFAULT 'custom', + is_pinned boolean NOT NULL DEFAULT FALSE, + is_predefined boolean NOT NULL DEFAULT FALSE, + is_template boolean NOT NULL DEFAULT FALSE, + predefined_key text NULL DEFAULT NULL, + default_config jsonb NOT NULL DEFAULT '{ + "col": 2, + "row": 2, + "position": 0 + }'::jsonb, + CONSTRAINT null_project_id_for_template_only + CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ), + CONSTRAINT unique_key UNIQUE (predefined_key) ); CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public); CREATE TABLE IF NOT EXISTS metric_series @@ -817,6 +831,29 @@ $$ ); CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id); + + CREATE TABLE dashboards + ( + dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + name text NOT NULL, + is_public boolean NOT NULL DEFAULT TRUE, + is_pinned boolean NOT NULL DEFAULT FALSE, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + deleted_at timestamp NULL DEFAULT NULL + ); + + CREATE TABLE dashboard_widgets + ( + widget_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + dashboard_id integer NOT NULL REFERENCES dashboards (dashboard_id) ON DELETE CASCADE, + metric_id integer NOT NULL REFERENCES metrics (metric_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + config jsonb NOT NULL DEFAULT '{}'::jsonb + ); + CREATE TABLE IF NOT EXISTS searches ( search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, @@ -875,16 +912,14 @@ LANGUAGE plpgsql; DO $$ BEGIN - IF (with 
to_check (name) as ( - values ('clicks'), - ('errors'), - ('graphql'), - ('inputs'), - ('pages'), - ('performance'), - ('resources'), - ('state_actions') - ) + IF (with to_check (name) as (values ('clicks'), + ('errors'), + ('graphql'), + ('inputs'), + ('pages'), + ('performance'), + ('resources'), + ('state_actions')) select bool_and(exists(select * from information_schema.tables t where table_schema = 'events' @@ -899,7 +934,7 @@ $$ timestamp bigint NOT NULL, host text NOT NULL, path text NOT NULL, - base_path text NOT NULL, + query text NULL, referrer text DEFAULT NULL, base_referrer text DEFAULT NULL, dom_building_time integer DEFAULT NULL, @@ -916,13 +951,9 @@ $$ PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS pages_session_id_idx ON events.pages (session_id); - CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops); CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops); CREATE INDEX IF NOT EXISTS pages_timestamp_idx ON events.pages (timestamp); CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_idx ON events.pages (session_id, timestamp); - CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops); - CREATE INDEX IF NOT EXISTS pages_base_path_idx ON events.pages (base_path); - CREATE INDEX IF NOT EXISTS pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1)); CREATE INDEX IF NOT EXISTS pages_base_referrer_idx ON events.pages (base_referrer); CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer, length(base_referrer) - @@ -948,14 +979,20 @@ $$ CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL; CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL; CREATE INDEX IF NOT EXISTS pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR - first_paint_time > 0 OR - dom_content_loaded_time > 0 OR + first_paint_time > + 0 OR + dom_content_loaded_time > + 0 OR ttfb > 0 OR - time_to_interactive > 0; + time_to_interactive > + 0; CREATE INDEX IF NOT EXISTS pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL; CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL; - CREATE INDEX IF NOT EXISTS pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp); - CREATE INDEX IF NOT EXISTS pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2; + CREATE INDEX IF NOT EXISTS pages_path_session_id_timestamp_idx ON events.pages (path, session_id, timestamp); + CREATE INDEX IF NOT EXISTS pages_path_pathLNGT2_idx ON events.pages (path) WHERE length(path) > 2; + CREATE INDEX IF NOT EXISTS pages_query_nn_idx ON events.pages (query) WHERE query IS NOT NULL; + CREATE INDEX IF NOT EXISTS pages_query_nn_gin_idx ON events.pages USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL; + CREATE TABLE IF NOT EXISTS events.clicks ( @@ -1130,11 +1167,9 @@ LANGUAGE plpgsql; DO $$ BEGIN - IF (with to_check (name) as ( - values 
('customs'), - ('issues'), - ('requests') - ) + IF (with to_check (name) as (values ('customs'), + ('issues'), + ('requests')) select bool_and(exists(select * from information_schema.tables t where table_schema = 'events_common' @@ -1191,6 +1226,9 @@ $$ response_body text NULL, status_code smallint NULL, method http_method NULL, + host text NULL, + path text NULL, + query text NULL, PRIMARY KEY (session_id, timestamp, seq_index) ); CREATE INDEX IF NOT EXISTS requests_url_idx ON events_common.requests (url); @@ -1212,6 +1250,12 @@ $$ CREATE INDEX IF NOT EXISTS requests_response_body_nn_idx ON events_common.requests (response_body) WHERE response_body IS NOT NULL; CREATE INDEX IF NOT EXISTS requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL; CREATE INDEX IF NOT EXISTS requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL; + CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL; END IF; @@ -1219,5 +1263,244 @@ $$ $$ LANGUAGE plpgsql; +INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, + view_type) +VALUES ('Captured sessions', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 +}', true, true, true, 'count_sessions', 'predefined', 'overview'), + ('Request Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_request_load_time', 'predefined', 'overview'), + ('Page Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_page_load_time', 'predefined', 'overview'), + ('Image Load Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_image_load_time', 'predefined', 'overview'), + ('DOM Content Load Start', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'), + ('First Meaningful paint', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'), + ('No. 
of Visited Pages', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_visited_pages', 'predefined', 'overview'), + ('Session Duration', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_session_duration', 'predefined', 'overview'), + ('DOM Build Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'), + ('Pages Response Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'), + ('Response Time', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_response_time', 'predefined', 'overview'), + ('First Paint', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_first_paint', 'predefined', 'overview'), + ('DOM Content Loaded', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'), + ('Time Till First byte', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'), + ('Time To Interactive', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'), + ('Captured requests', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'count_requests', 'predefined', 'overview'), + ('Time To Render', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_time_to_render', 'predefined', 'overview'), + ('Memory Consumption', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'), + ('CPU Load', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_cpu', 'predefined', 'overview'), + ('Frame rate', 'overview', '{ + "col": 1, + "row": 1, + "position": 0 + }', true, true, true, 'avg_fps', 'predefined', 'overview'), + + ('Sessions Affected by JS Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'), + ('Top Domains with 4xx Fetch Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'), + ('Top Domains with 5xx Fetch Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'), + ('Errors per Domain', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'errors_per_domains', 'predefined', 'table'), + ('Fetch Calls with Errors', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'calls_errors', 'predefined', 'table'), + ('Errors by Type', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'errors_per_type', 'predefined', 'barChart'), + ('Errors by Origin', 'errors', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'), + + ('Speed Index by Location', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'speed_location', 'predefined', 'map'), + ('Slowest Domains', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'slowest_domains', 'predefined', 'table'), + ('Sessions per Browser', 'performance', '{ + "col": 
2, + "row": 2, + "position": 0 + }', true, true, true, 'sessions_per_browser', 'predefined', 'table'), + ('Time To Render', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'time_to_render', 'predefined', 'areaChart'), + ('Sessions Impacted by Slow Pages', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'), + ('Memory Consumption', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'memory_consumption', 'predefined', 'areaChart'), + ('CPU Load', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'cpu', 'predefined', 'areaChart'), + ('Frame Rate', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'fps', 'predefined', 'areaChart'), + ('Crashes', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'crashes', 'predefined', 'areaChart'), + ('Resources Loaded vs Visually Complete', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'), + ('DOM Build Time', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'), + ('Pages Response Time', 'performance', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'pages_response_time', 'predefined', 'areaChart'), + ('Pages Response Time Distribution', 'performance', '{ + "col": 4, + "row": 2, + "position": 0 + }', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'), + + ('Missing Resources', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'missing_resources', 'predefined', 'table'), + ('Slowest Resources', 'resources', '{ + "col": 4, + "row": 2, + "position": 0 + }', true, true, true, 'slowest_resources', 'predefined', 'table'), + ('Resources Fetch Time', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_loading_time', 'predefined', 'table'), + ('Resource Loaded vs Response End', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'), + ('Breakdown of Loaded Resources', 'resources', '{ + "col": 2, + "row": 2, + "position": 0 + }', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart') +ON CONFLICT (predefined_key) DO UPDATE + SET name=excluded.name, + category=excluded.category, + default_config=excluded.default_config, + is_predefined=excluded.is_predefined, + is_template=excluded.is_template, + is_public=excluded.is_public, + metric_type=excluded.metric_type, + view_type=excluded.view_type; COMMIT; \ No newline at end of file diff --git a/ee/utilities/.gitignore b/ee/utilities/.gitignore index fc05191e0..0aaf625c9 100644 --- a/ee/utilities/.gitignore +++ b/ee/utilities/.gitignore @@ -10,4 +10,6 @@ build.sh servers/peerjs-server.js servers/sourcemaps-handler.js servers/sourcemaps-server.js -#servers/websocket.js \ No newline at end of file +#servers/websocket.js +/utils +/Dockerfile diff --git a/ee/utilities/package-lock.json b/ee/utilities/package-lock.json index e7974f3f2..98ef3f745 100644 --- a/ee/utilities/package-lock.json +++ b/ee/utilities/package-lock.json @@ -1,22 +1,19 @@ { - "name": "utilities_server", + "name": "utilities-server", "version": "1.0.0", "lockfileVersion": 2, "requires": true, "packages": { 
"": { - "name": "utilities_server", + "name": "utilities-server", "version": "1.0.0", - "license": "MIT", + "license": "Elastic License 2.0 (ELv2)", "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "@socket.io/redis-adapter": "^7.1.0", - "aws-sdk": "^2.992.0", "express": "^4.17.1", - "peer": "^0.6.1", "redis": "^4.0.3", "socket.io": "^4.4.1", - "source-map": "^0.7.3", "ua-parser-js": "^1.0.2", "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" } @@ -41,9 +38,9 @@ } }, "node_modules/@node-redis/client": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.4.tgz", - "integrity": "sha512-IM/NRAqg7MvNC3bIRQipXGrEarunrdgvrbAzsd3ty93LSHi/M+ybQulOERQi8a3M+P5BL8HenwXjiIoKm6ml2g==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.5.tgz", + "integrity": "sha512-ESZ3bd1f+od62h4MaBLKum+klVJfA4wAeLHcVQBkoXa1l0viFesOWnakLQqKg+UyrlJhZmXJWtu0Y9v7iTMrig==", "dependencies": { "cluster-key-slot": "1.1.0", "generic-pool": "3.8.2", @@ -71,9 +68,9 @@ } }, "node_modules/@node-redis/search": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.3.tgz", - "integrity": "sha512-rsrzkGWI84di/uYtEctS/4qLusWt0DESx/psjfB0TFpORDhe7JfC0h8ary+eHulTksumor244bXLRSqQXbFJmw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.5.tgz", + "integrity": "sha512-MCOL8iCKq4v+3HgEQv8zGlSkZyXSXtERgrAJ4TSryIG/eLFy84b57KmNNa/V7M1Q2Wd2hgn2nPCGNcQtk1R1OQ==", "peerDependencies": { "@node-redis/client": "^1.0.0" } @@ -108,28 +105,11 @@ "node": ">=10.0.0" } }, - "node_modules/@types/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, "node_modules/@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", "integrity": "sha512-SRXjM+tfsSlA9VuG8hGO2nft2p8zjXCK1VcC6N4NXbBbYbSia9kzCChYQajIjzIqOOOuh5Ock6MmV2oux4jDZQ==" }, - "node_modules/@types/connect": { - "version": "3.4.35", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", - "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -140,63 +120,10 @@ "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz", "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, - "node_modules/@types/express": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", - "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.17.28", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz", - "integrity": 
"sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*" - } - }, - "node_modules/@types/mime": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", - "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" - }, "node_modules/@types/node": { - "version": "17.0.21", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", - "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==" - }, - "node_modules/@types/qs": { - "version": "6.9.7", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", - "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" - }, - "node_modules/@types/range-parser": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", - "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" - }, - "node_modules/@types/serve-static": { - "version": "1.13.10", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", - "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/ws": { - "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", - "dependencies": { - "@types/node": "*" - } + "version": "17.0.25", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", + "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" }, "node_modules/accepts": { "version": "1.3.8", @@ -210,28 +137,6 @@ "node": ">= 0.6" } }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -245,44 +150,6 @@ "node": ">=0.8" } }, - "node_modules/aws-sdk": { - "version": "2.1087.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1087.0.tgz", - "integrity": "sha512-m5EERT29Fwh2cv3SaSdygeAjJBXnjSaXRRERy70bf6PQ7KgmASJouBxY11g5G7LTEPK/yfB0TGshujKh3hEtPA==", - "dependencies": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.16.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.3.2", - "xml2js": "0.4.19" - }, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/base64-js": { - "version": 
"1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/base64id": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", @@ -324,16 +191,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "node_modules/buffer": { - "version": "4.9.2", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", - "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", - "dependencies": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4", - "isarray": "^1.0.0" - } - }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -370,16 +227,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cliui": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^6.2.0" - } - }, "node_modules/cluster-key-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", @@ -388,22 +235,6 @@ "node": ">=0.10.0" } }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", @@ -459,9 +290,9 @@ } }, "node_modules/debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dependencies": { "ms": "2.1.2" }, @@ -474,14 +305,6 @@ } } }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -500,11 +323,6 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, "node_modules/encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -544,26 +362,6 @@ "node": ">=10.0.0" } }, - "node_modules/engine.io/node_modules/ws": { - "version": "8.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", - "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -577,14 +375,6 @@ "node": ">= 0.6" } }, - "node_modules/events": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=", - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/express": { "version": "4.17.3", "resolved": "https://registry.npmjs.org/express/-/express-4.17.3.tgz", @@ -676,18 +466,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -712,14 +490,6 @@ "node": ">= 4" } }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, "node_modules/http-errors": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", @@ -746,11 +516,6 @@ "node": ">=0.10.0" } }, - "node_modules/ieee754": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", - "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" - }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -773,27 +538,6 @@ "node": ">= 0.10" } }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "node_modules/jmespath": { - "version": "0.16.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", - "integrity": 
"sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/json-schema": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", @@ -813,17 +557,6 @@ "verror": "1.10.0" } }, - "node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", @@ -841,12 +574,12 @@ } }, "node_modules/maxmind": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.5.tgz", - "integrity": "sha512-ak0TABuO664C5zXyQH5u13WmtdTwxxXLGOy1e51ZRrp/cEH9xfOcG20F51TcNhVyDos13Ys94kxN8/elg9Ri4Q==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.6.tgz", + "integrity": "sha512-CwnEZqJX0T6b2rWrc0/V3n9hL/hWAMEn7fY09077YJUHiHx7cn/esA2ZIz8BpYLSJUf7cGVel0oUJa9jMwyQpg==", "dependencies": { "mmdb-lib": "2.0.2", - "tiny-lru": "7.0.6" + "tiny-lru": "8.0.2" }, "engines": { "node": ">=10", @@ -886,19 +619,19 @@ } }, "node_modules/mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "dependencies": { - "mime-db": "1.51.0" + "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" @@ -950,39 +683,6 @@ "node": ">= 0.8" } }, - "node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/parseurl": { "version": "1.3.3", "resolved": 
"https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -991,50 +691,11 @@ "node": ">= 0.8" } }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "engines": { - "node": ">=8" - } - }, "node_modules/path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, - "node_modules/peer": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/peer/-/peer-0.6.1.tgz", - "integrity": "sha512-zPJSPoZvo+83sPJNrW8o93QTktx7dKk67965RRDDNAIelWw1ZwE6ZmmhsvRrdNRlK0knQb3rR8GBdZlbWzCYJw==", - "dependencies": { - "@types/cors": "^2.8.6", - "@types/express": "^4.17.3", - "@types/ws": "^7.2.3", - "body-parser": "^1.19.0", - "cors": "^2.8.5", - "express": "^4.17.1", - "uuid": "^3.4.0", - "ws": "^7.2.3", - "yargs": "^15.3.1" - }, - "bin": { - "peerjs": "bin/peerjs" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/peer/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -1047,11 +708,6 @@ "node": ">= 0.10" } }, - "node_modules/punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" - }, "node_modules/qs": { "version": "6.9.7", "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.7.tgz", @@ -1063,15 +719,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/quick-lru": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", @@ -1106,15 +753,15 @@ } }, "node_modules/redis": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.4.tgz", - "integrity": "sha512-KaM1OAj/nGrSeybmmOWSMY0LXTGT6FVWgUZZrd2MYzXKJ+VGtqVaciGQeNMfZiQX+kDM8Ke4uttb54m2rm6V0A==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.6.tgz", + "integrity": "sha512-IaPAxgF5dV0jx+A9l6yd6R9/PAChZIoAskDVRzUODeLDNhsMlq7OLLTmu0AwAr0xjrJ1bibW5xdpRwqIQ8Q0Xg==", "dependencies": { "@node-redis/bloom": "1.0.1", - "@node-redis/client": "1.0.4", + "@node-redis/client": "1.0.5", "@node-redis/graph": "1.0.0", "@node-redis/json": "1.0.2", - "@node-redis/search": "1.0.3", + "@node-redis/search": "1.0.5", "@node-redis/time-series": "1.0.2" } }, @@ -1137,19 +784,6 @@ "node": ">=4" } }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -1174,11 +808,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "node_modules/sax": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" - }, "node_modules/send": { "version": "0.17.2", "resolved": "https://registry.npmjs.org/send/-/send-0.17.2.tgz", @@ -1234,11 +863,6 @@ "node": ">= 0.8.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -1278,14 +902,6 @@ "node": ">=10.0.0" } }, - "node_modules/source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", - "engines": { - "node": ">= 8" - } - }, "node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -1294,34 +910,10 @@ "node": ">= 0.6" } }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/tiny-lru": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-7.0.6.tgz", - "integrity": "sha512-zNYO0Kvgn5rXzWpL0y3RS09sMK67eGaQj9805jlK9G6pSadfriTczzLHFXa/xcW4mIRfmlB9HyQ/+SgL0V1uow==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-8.0.2.tgz", + "integrity": "sha512-ApGvZ6vVvTNdsmt676grvCkUCGwzG9IqXma5Z07xJgiC5L7akUMof5U8G2JTI9Rz/ovtVhJBlY6mNhEvtjzOIg==", "engines": { "node": ">=6" } @@ -1388,15 +980,6 @@ "node": ">= 0.8" } }, - "node_modules/url": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", - "dependencies": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1405,15 +988,6 @@ "node": ">= 0.4.0" } }, - "node_modules/uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/uWebSockets.js": { "version": "20.6.0", "resolved": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#a58e810e47a23696410f6073c8c905dc38f75da5" @@ -1439,30 +1013,12 @@ "extsprintf": "^1.2.0" } }, - "node_modules/which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" - }, - "node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/ws": { - "version": "7.5.7", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", - "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", "engines": { - "node": ">=8.3.0" + "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", @@ -1477,73 +1033,10 @@ } } }, - "node_modules/xml2js": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", - "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", - "dependencies": { - "sax": ">=0.6.0", - "xmlbuilder": "~9.0.1" - } - }, - "node_modules/xmlbuilder": { - "version": "9.0.7", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=", - "engines": { - "node": ">=4.0" - } - 
}, - "node_modules/y18n": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", - "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" - }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "node_modules/yargs": { - "version": "15.4.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", - "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", - "dependencies": { - "cliui": "^6.0.0", - "decamelize": "^1.2.0", - "find-up": "^4.1.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^4.2.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^18.1.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs-parser": { - "version": "18.1.3", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", - "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/yargs-parser/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "engines": { - "node": ">=6" - } } }, "dependencies": { @@ -1565,9 +1058,9 @@ "requires": {} }, "@node-redis/client": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.4.tgz", - "integrity": "sha512-IM/NRAqg7MvNC3bIRQipXGrEarunrdgvrbAzsd3ty93LSHi/M+ybQulOERQi8a3M+P5BL8HenwXjiIoKm6ml2g==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.5.tgz", + "integrity": "sha512-ESZ3bd1f+od62h4MaBLKum+klVJfA4wAeLHcVQBkoXa1l0viFesOWnakLQqKg+UyrlJhZmXJWtu0Y9v7iTMrig==", "requires": { "cluster-key-slot": "1.1.0", "generic-pool": "3.8.2", @@ -1588,9 +1081,9 @@ "requires": {} }, "@node-redis/search": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.3.tgz", - "integrity": "sha512-rsrzkGWI84di/uYtEctS/4qLusWt0DESx/psjfB0TFpORDhe7JfC0h8ary+eHulTksumor244bXLRSqQXbFJmw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.5.tgz", + "integrity": "sha512-MCOL8iCKq4v+3HgEQv8zGlSkZyXSXtERgrAJ4TSryIG/eLFy84b57KmNNa/V7M1Q2Wd2hgn2nPCGNcQtk1R1OQ==", "requires": {} }, "@node-redis/time-series": { @@ -1615,28 +1108,11 @@ "uid2": "0.0.3" } }, - "@types/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", - "requires": { - "@types/connect": "*", - "@types/node": "*" - } - }, "@types/component-emitter": { "version": "1.2.11", "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.11.tgz", "integrity": "sha512-SRXjM+tfsSlA9VuG8hGO2nft2p8zjXCK1VcC6N4NXbBbYbSia9kzCChYQajIjzIqOOOuh5Ock6MmV2oux4jDZQ==" }, - "@types/connect": { - "version": "3.4.35", - 
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", - "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", - "requires": { - "@types/node": "*" - } - }, "@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -1647,63 +1123,10 @@ "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz", "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" }, - "@types/express": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", - "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", - "requires": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "@types/express-serve-static-core": { - "version": "4.17.28", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz", - "integrity": "sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==", - "requires": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*" - } - }, - "@types/mime": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", - "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" - }, "@types/node": { - "version": "17.0.21", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", - "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==" - }, - "@types/qs": { - "version": "6.9.7", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", - "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" - }, - "@types/range-parser": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", - "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" - }, - "@types/serve-static": { - "version": "1.13.10", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", - "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", - "requires": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "@types/ws": { - "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", - "requires": { - "@types/node": "*" - } + "version": "17.0.25", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.25.tgz", + "integrity": "sha512-wANk6fBrUwdpY4isjWrKTufkrXdu1D2YHCot2fD/DfWxF5sMrVSA+KN7ydckvaTCh0HiqX9IVl0L5/ZoXg5M7w==" }, "accepts": { "version": "1.3.8", @@ -1714,19 +1137,6 @@ "negotiator": "0.6.3" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -1737,27 +1147,6 @@ "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, - "aws-sdk": { - "version": "2.1087.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1087.0.tgz", - "integrity": "sha512-m5EERT29Fwh2cv3SaSdygeAjJBXnjSaXRRERy70bf6PQ7KgmASJouBxY11g5G7LTEPK/yfB0TGshujKh3hEtPA==", - "requires": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.16.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "uuid": "3.3.2", - "xml2js": "0.4.19" - } - }, - "base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" - }, "base64id": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", @@ -1795,16 +1184,6 @@ } } }, - "buffer": { - "version": "4.9.2", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", - "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", - "requires": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4", - "isarray": "^1.0.0" - } - }, "bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -1826,34 +1205,11 @@ "type-fest": "^1.2.1" } }, - "cliui": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^6.2.0" - } - }, "cluster-key-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", @@ -1897,18 +1253,13 @@ } }, "debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": 
"sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" - }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -1924,11 +1275,6 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -1949,14 +1295,6 @@ "debug": "~4.3.1", "engine.io-parser": "~5.0.3", "ws": "~8.2.3" - }, - "dependencies": { - "ws": { - "version": "8.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", - "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", - "requires": {} - } } }, "engine.io-parser": { @@ -1977,11 +1315,6 @@ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" }, - "events": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" - }, "express": { "version": "4.17.3", "resolved": "https://registry.npmjs.org/express/-/express-4.17.3.tgz", @@ -2068,15 +1401,6 @@ } } }, - "find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "requires": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - } - }, "forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -2092,11 +1416,6 @@ "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.8.2.tgz", "integrity": "sha512-nGToKy6p3PAbYQ7p1UlWl6vSPwfwU6TMSWK7TTu+WUY4ZjyZQGniGGt2oNVvyNSpyZYSB43zMXVLcBm08MTMkg==" }, - "get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" - }, "http-errors": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", @@ -2117,11 +1436,6 @@ "safer-buffer": ">= 2.1.2 < 3" } }, - "ieee754": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", - "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" - }, "inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2141,21 +1455,6 @@ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "jmespath": { - "version": "0.16.0", - "resolved": 
"https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", - "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==" - }, "json-schema": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", @@ -2172,14 +1471,6 @@ "verror": "1.10.0" } }, - "locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "requires": { - "p-locate": "^4.1.0" - } - }, "lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", @@ -2191,12 +1482,12 @@ "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==" }, "maxmind": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.5.tgz", - "integrity": "sha512-ak0TABuO664C5zXyQH5u13WmtdTwxxXLGOy1e51ZRrp/cEH9xfOcG20F51TcNhVyDos13Ys94kxN8/elg9Ri4Q==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.6.tgz", + "integrity": "sha512-CwnEZqJX0T6b2rWrc0/V3n9hL/hWAMEn7fY09077YJUHiHx7cn/esA2ZIz8BpYLSJUf7cGVel0oUJa9jMwyQpg==", "requires": { "mmdb-lib": "2.0.2", - "tiny-lru": "7.0.6" + "tiny-lru": "8.0.2" } }, "media-typer": { @@ -2220,16 +1511,16 @@ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { - "version": "1.51.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", - "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==" + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { - "version": "2.1.34", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", - "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "requires": { - "mime-db": "1.51.0" + "mime-db": "1.52.0" } }, "mmdb-lib": { @@ -2265,65 +1556,16 @@ "ee-first": "1.1.1" } }, - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "requires": { - "p-limit": "^2.2.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" - }, "parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, - "path-exists": { - 
"version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" - }, "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, - "peer": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/peer/-/peer-0.6.1.tgz", - "integrity": "sha512-zPJSPoZvo+83sPJNrW8o93QTktx7dKk67965RRDDNAIelWw1ZwE6ZmmhsvRrdNRlK0knQb3rR8GBdZlbWzCYJw==", - "requires": { - "@types/cors": "^2.8.6", - "@types/express": "^4.17.3", - "@types/ws": "^7.2.3", - "body-parser": "^1.19.0", - "cors": "^2.8.5", - "express": "^4.17.1", - "uuid": "^3.4.0", - "ws": "^7.2.3", - "yargs": "^15.3.1" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, "proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -2333,21 +1575,11 @@ "ipaddr.js": "1.9.1" } }, - "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" - }, "qs": { "version": "6.9.7", "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.7.tgz", "integrity": "sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw==" }, - "querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" - }, "quick-lru": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", @@ -2370,15 +1602,15 @@ } }, "redis": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.4.tgz", - "integrity": "sha512-KaM1OAj/nGrSeybmmOWSMY0LXTGT6FVWgUZZrd2MYzXKJ+VGtqVaciGQeNMfZiQX+kDM8Ke4uttb54m2rm6V0A==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.6.tgz", + "integrity": "sha512-IaPAxgF5dV0jx+A9l6yd6R9/PAChZIoAskDVRzUODeLDNhsMlq7OLLTmu0AwAr0xjrJ1bibW5xdpRwqIQ8Q0Xg==", "requires": { "@node-redis/bloom": "1.0.1", - "@node-redis/client": "1.0.4", + "@node-redis/client": "1.0.5", "@node-redis/graph": "1.0.0", "@node-redis/json": "1.0.2", - "@node-redis/search": "1.0.3", + "@node-redis/search": "1.0.5", "@node-redis/time-series": "1.0.2" } }, @@ -2395,16 +1627,6 @@ "redis-errors": "^1.0.0" } }, - "require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" - }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" - }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -2415,11 +1637,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "sax": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" - }, "send": { "version": "0.17.2", "resolved": "https://registry.npmjs.org/send/-/send-0.17.2.tgz", @@ -2473,11 +1690,6 @@ "send": "0.17.2" } }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, "setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -2511,38 +1723,15 @@ "debug": "~4.3.1" } }, - "source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, "tiny-lru": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-7.0.6.tgz", - "integrity": "sha512-zNYO0Kvgn5rXzWpL0y3RS09sMK67eGaQj9805jlK9G6pSadfriTczzLHFXa/xcW4mIRfmlB9HyQ/+SgL0V1uow==" + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-8.0.2.tgz", + "integrity": "sha512-ApGvZ6vVvTNdsmt676grvCkUCGwzG9IqXma5Z07xJgiC5L7akUMof5U8G2JTI9Rz/ovtVhJBlY6mNhEvtjzOIg==" }, "toidentifier": { "version": "1.0.1", @@ -2578,25 +1767,11 @@ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" }, - "url": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", - "requires": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" - }, "uWebSockets.js": { "version": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#a58e810e47a23696410f6073c8c905dc38f75da5", "from": "uWebSockets.js@github:uNetworking/uWebSockets.js#v20.6.0" @@ -2616,84 +1791,16 @@ "extsprintf": "^1.2.0" } }, - "which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" - }, - "wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "requires": { - "ansi-styles": 
"^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, "ws": { - "version": "7.5.7", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.7.tgz", - "integrity": "sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A==", + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", "requires": {} }, - "xml2js": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", - "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", - "requires": { - "sax": ">=0.6.0", - "xmlbuilder": "~9.0.1" - } - }, - "xmlbuilder": { - "version": "9.0.7", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" - }, - "y18n": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", - "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" - }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yargs": { - "version": "15.4.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", - "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", - "requires": { - "cliui": "^6.0.0", - "decamelize": "^1.2.0", - "find-up": "^4.1.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^4.2.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^18.1.2" - } - }, - "yargs-parser": { - "version": "18.1.3", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", - "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - }, - "dependencies": { - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" - } - } } } } diff --git a/ee/utilities/package.json b/ee/utilities/package.json index 87e1d1596..99c2666da 100644 --- a/ee/utilities/package.json +++ b/ee/utilities/package.json @@ -1,5 +1,5 @@ { - "name": "utilities_server", + "name": "utilities-server", "version": "1.0.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", @@ -12,7 +12,7 @@ "url": "git+https://github.com/openreplay/openreplay.git" }, "author": "KRAIEM Taha Yassine ", - "license": "MIT", + "license": "Elastic License 2.0 (ELv2)", "bugs": { "url": "https://github.com/openreplay/openreplay/issues" }, @@ -20,12 +20,9 @@ "dependencies": { "@maxmind/geoip2-node": "^3.4.0", "@socket.io/redis-adapter": "^7.1.0", - "aws-sdk": "^2.992.0", "express": "^4.17.1", - "peer": "^0.6.1", "redis": "^4.0.3", "socket.io": "^4.4.1", - "source-map": "^0.7.3", "ua-parser-js": "^1.0.2", "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.6.0" } diff --git a/ee/utilities/server.js b/ee/utilities/server.js index 
d049faa19..429b37c25 100644 --- a/ee/utilities/server.js +++ b/ee/utilities/server.js @@ -1,72 +1,42 @@ -var sourcemapsReaderServer = require('./servers/sourcemaps-server'); -var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server'); -var express = require('express'); -const {ExpressPeerServer} = require('peer'); -var socket; +const dumps = require('./utils/HeapSnapshot'); +const {request_logger} = require('./utils/helper'); +const express = require('express'); +let socket; if (process.env.redis === "true") { - console.log("Using Redis"); socket = require("./servers/websocket-cluster"); } else { socket = require("./servers/websocket"); } const HOST = '0.0.0.0'; -const PORT = 9000; - -var app = express(); +const PORT = 9001; let debug = process.env.debug === "1" || false; -const request_logger = (identity) => { - return (req, res, next) => { - debug && console.log(identity, new Date().toTimeString(), 'REQUEST', req.method, req.originalUrl); - res.on('finish', function () { - if (this.statusCode !== 200 || debug) { - console.log(new Date().toTimeString(), 'RESPONSE', req.method, req.originalUrl, this.statusCode); - } - }) - - next(); - } -}; -app.use(request_logger("[app]")); - - -app.use('/sourcemaps', sourcemapsReaderServer); -app.use('/assist', peerRouter); - -const server = app.listen(PORT, HOST, () => { - console.log(`App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); -}); - -const peerServer = ExpressPeerServer(server, { - debug: true, - path: '/', - proxied: true, - allow_discovery: false -}); -peerServer.on('connection', peerConnection); -peerServer.on('disconnect', peerDisconnect); -peerServer.on('error', peerError); -app.use('/', peerServer); -app.enable('trust proxy'); +const PREFIX = process.env.prefix || `/assist`; if (process.env.uws !== "true") { - var wsapp = express(); + let wsapp = express(); wsapp.use(request_logger("[wsapp]")); - wsapp.use('/assist', socket.wsRouter); - - const wsserver = wsapp.listen(PORT + 1, HOST, () => { - console.log(`WS App listening on http://${HOST}:${PORT + 1}`); + wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => { + res.statusCode = 200; + res.end("ok!"); + }); + wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router); + wsapp.use(`${PREFIX}/${process.env.S3_KEY}`, socket.wsRouter); + wsapp.enable('trust proxy'); + const wsserver = wsapp.listen(PORT, HOST, () => { + console.log(`WS App listening on http://${HOST}:${PORT}`); console.log('Press Ctrl+C to quit.'); }); - wsapp.enable('trust proxy'); + socket.start(wsserver); - module.exports = {wsserver, server}; + module.exports = {wsserver}; } else { console.log("Using uWebSocket"); const {App} = require("uWebSockets.js"); - const PREFIX = process.env.prefix || '/assist' + const uapp = new App(); @@ -75,6 +45,7 @@ if (process.env.uws !== "true") { } uapp.get(PREFIX, healthFn); uapp.get(`${PREFIX}/`, healthFn); + uapp.get(`${PREFIX}/${process.env.S3_KEY}`, healthFn); /* Either onAborted or simply finished request */ @@ -110,11 +81,11 @@ if (process.env.uws !== "true") { socket.start(uapp); - uapp.listen(HOST, PORT + 1, (token) => { + uapp.listen(HOST, PORT, (token) => { if (!token) { console.warn("port already in use"); } - console.log(`WS App listening on http://${HOST}:${PORT + 1}`); + console.log(`WS App listening on http://${HOST}:${PORT}`); console.log('Press Ctrl+C to quit.'); }); @@ -124,5 +95,5 @@ if (process.env.uws !== "true") { debug && console.log(err.stack); // 
process.exit(1); }); - module.exports = {uapp, server}; + module.exports = {uapp}; } \ No newline at end of file diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js index c044043a5..904aaea17 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/utilities/servers/websocket-cluster.js @@ -1,8 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const geoip2Reader = require('@maxmind/geoip2-node').Reader; -const {extractPeerId} = require('./peerjs-server'); +const {extractPeerId} = require('../utils/helper'); +const {geoip} = require('../utils/geoIP'); const {createAdapter} = require("@socket.io/redis-adapter"); const {createClient} = require("redis"); const wsRouter = express.Router(); @@ -14,10 +14,11 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; +const SESSION_RECONNECTED = "SESSION_RECONNECTED"; const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379"; const pubClient = createClient({url: REDIS_URL}); const subClient = pubClient.duplicate(); - +console.log(`Using Redis: ${REDIS_URL}`); let io; const debug = process.env.debug === "1" || false; @@ -86,8 +87,7 @@ const extractProjectKeyFromRequest = function (req) { const getAvailableRooms = async function () { - let rooms = await io.of('/').adapter.allRooms(); - return rooms; + return io.of('/').adapter.allRooms(); } const respond = function (res, data) { @@ -125,7 +125,7 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); +wsRouter.get(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -151,7 +151,7 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -159,7 +159,7 @@ const socketsLive = async function (req, res) { let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); + let {projectKey} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -174,12 +174,12 @@ const socketsLive = async function (req, res) { } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]); } } respond(res, liveSessions); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -188,7 +188,7 @@ const socketsLiveByProject = async function (req, res) { let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); + let {projectKey} = extractPeerId(peerId); if (projectKey === _projectKey) { let connected_sockets = await 
io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -203,12 +203,12 @@ const socketsLiveByProject = async function (req, res) { } } } - liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]); + liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey] || []); } } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -254,6 +254,7 @@ async function get_all_agents_ids(io, socket) { return agents; } + function extractSessionInfo(socket) { if (socket.handshake.query.sessionInfo !== undefined) { debug && console.log("received headers"); @@ -267,21 +268,11 @@ function extractSessionInfo(socket) { socket.handshake.query.sessionInfo.userDevice = ua.device.model || null; socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop'; socket.handshake.query.sessionInfo.userCountry = null; - - const options = { - // you can use options like `cache` or `watchForUpdates` - }; - // console.log("Looking for MMDB file in " + process.env.MAXMINDDB_FILE); - geoip2Reader.open(process.env.MAXMINDDB_FILE, options) - .then(reader => { - debug && console.log("looking for location of "); - debug && console.log(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); - let country = reader.country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); - socket.handshake.query.sessionInfo.userCountry = country.country.isoCode; - }) - .catch(error => { - console.error(error); - }); + if (geoip() !== null) { + debug && console.log(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`); + let country = geoip().country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); + socket.handshake.query.sessionInfo.userCountry = country.country.isoCode; + } } } @@ -293,10 +284,6 @@ module.exports = { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; socket.identity = socket.handshake.query.identity; - const {projectKey, sessionId} = extractPeerId(socket.peerId); - socket.sessionId = sessionId; - socket.projectKey = projectKey; - socket.lastMessageReceivedAt = Date.now(); let {c_sessions, c_agents} = await sessions_agents_count(io, socket); if (socket.identity === IDENTITIES.session) { if (c_sessions > 0) { @@ -309,6 +296,7 @@ module.exports = { debug && console.log(`notifying new session about agent-existence`); let agents_ids = await get_all_agents_ids(io, socket); io.to(socket.id).emit(AGENTS_CONNECTED, agents_ids); + socket.to(socket.peerId).emit(SESSION_RECONNECTED, socket.id); } } else if (c_sessions <= 0) { @@ -359,7 +347,6 @@ module.exports = { }); socket.onAny(async (eventName, ...args) => { - socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); @@ -383,7 +370,7 @@ module.exports = { let rooms = await io.of('/').adapter.allRooms(); let validRooms = []; console.log(` ====== Rooms: ${rooms.size} ====== `); - const arr = Array.from(rooms) + // const arr = Array.from(rooms) // const filtered = 
arr.filter(room => !room[1].has(room[0])) for (let i of rooms) { let {projectKey, sessionId} = extractPeerId(i); @@ -402,11 +389,16 @@ module.exports = { console.error(e); } }, 20000, io); - Promise.all([pubClient.connect(), subClient.connect()]).then(() => { - io.adapter(createAdapter(pubClient, subClient)); - console.log("> redis connected."); - // io.listen(3000); - }); + Promise.all([pubClient.connect(), subClient.connect()]) + .then(() => { + io.adapter(createAdapter(pubClient, subClient)); + console.log("> redis connected."); + }) + .catch((err) => { + console.log("> redis connection error"); + debug && console.error(err); + process.exit(2); + }); }, handlers: { socketsList, diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js index 0bd397d96..a20f23078 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/utilities/servers/websocket.js @@ -1,8 +1,8 @@ const _io = require('socket.io'); const express = require('express'); const uaParser = require('ua-parser-js'); -const geoip2Reader = require('@maxmind/geoip2-node').Reader; -const {extractPeerId} = require('./peerjs-server'); +const {extractPeerId} = require('../utils/helper'); +const {geoip} = require('../utils/geoIP'); const wsRouter = express.Router(); const UPDATE_EVENT = "UPDATE_SESSION"; const IDENTITIES = {agent: 'agent', session: 'session'}; @@ -12,6 +12,7 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED"; const AGENTS_CONNECTED = "AGENTS_CONNECTED"; const NO_SESSIONS = "SESSION_DISCONNECTED"; const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED"; +const SESSION_RECONNECTED = "SESSION_RECONNECTED"; let io; const debug = process.env.debug === "1" || false; @@ -107,7 +108,7 @@ const socketsList = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList); +wsRouter.get(`/sockets-list`, socketsList); const socketsListByProject = async function (req, res) { debug && console.log("[WS]looking for available sessions"); @@ -133,7 +134,7 @@ const socketsListByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject); +wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject); const socketsLive = async function (req, res) { debug && console.log("[WS]looking for all available LIVE sessions"); @@ -141,7 +142,7 @@ const socketsLive = async function (req, res) { let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); + let {projectKey} = extractPeerId(peerId); if (projectKey !== undefined) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -160,7 +161,7 @@ const socketsLive = async function (req, res) { } respond(res, liveSessions); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive); +wsRouter.get(`/sockets-live`, socketsLive); const socketsLiveByProject = async function (req, res) { debug && console.log("[WS]looking for available LIVE sessions"); @@ -169,7 +170,7 @@ const socketsLiveByProject = async function (req, res) { let liveSessions = {}; let rooms = await getAvailableRooms(); for (let peerId of rooms) { - let {projectKey, sessionId} = extractPeerId(peerId); + let {projectKey} = extractPeerId(peerId); if (projectKey === _projectKey) { let connected_sockets = await io.in(peerId).fetchSockets(); for (let item of connected_sockets) { @@ -188,7 +189,7 @@ 
const socketsLiveByProject = async function (req, res) { } respond(res, liveSessions[_projectKey] || []); } -wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject); +wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject); const findSessionSocketId = async (io, peerId) => { const connected_sockets = await io.in(peerId).fetchSockets(); @@ -232,6 +233,7 @@ async function get_all_agents_ids(io, socket) { return agents; } + function extractSessionInfo(socket) { if (socket.handshake.query.sessionInfo !== undefined) { debug && console.log("received headers"); @@ -245,21 +247,11 @@ function extractSessionInfo(socket) { socket.handshake.query.sessionInfo.userDevice = ua.device.model || null; socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop'; socket.handshake.query.sessionInfo.userCountry = null; - - const options = { - // you can use options like `cache` or `watchForUpdates` - }; - // console.log("Looking for MMDB file in " + process.env.MAXMINDDB_FILE); - geoip2Reader.open(process.env.MAXMINDDB_FILE, options) - .then(reader => { - debug && console.log("looking for location of "); - debug && console.log(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); - let country = reader.country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); - socket.handshake.query.sessionInfo.userCountry = country.country.isoCode; - }) - .catch(error => { - console.error(error); - }); + if (geoip() !== null) { + debug && console.log(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`); + let country = geoip().country(socket.handshake.headers['x-forwarded-for'] || socket.handshake.address); + socket.handshake.query.sessionInfo.userCountry = country.country.isoCode; + } } } @@ -271,10 +263,6 @@ module.exports = { debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`); socket.peerId = socket.handshake.query.peerId; socket.identity = socket.handshake.query.identity; - const {projectKey, sessionId} = extractPeerId(socket.peerId); - socket.sessionId = sessionId; - socket.projectKey = projectKey; - socket.lastMessageReceivedAt = Date.now(); let {c_sessions, c_agents} = await sessions_agents_count(io, socket); if (socket.identity === IDENTITIES.session) { if (c_sessions > 0) { @@ -287,6 +275,7 @@ module.exports = { debug && console.log(`notifying new session about agent-existence`); let agents_ids = await get_all_agents_ids(io, socket); io.to(socket.id).emit(AGENTS_CONNECTED, agents_ids); + socket.to(socket.peerId).emit(SESSION_RECONNECTED, socket.id); } } else if (c_sessions <= 0) { @@ -335,7 +324,6 @@ module.exports = { }); socket.onAny(async (eventName, ...args) => { - socket.lastMessageReceivedAt = Date.now(); if (socket.identity === IDENTITIES.session) { debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`); socket.to(socket.peerId).emit(eventName, args[0]); diff --git a/frontend/app/Router.js b/frontend/app/Router.js index 89fbdd343..ab41173bd 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -1,3 +1,4 @@ +import React, { lazy, Suspense } from 'react'; import { Switch, Route, Redirect } from 'react-router'; import { BrowserRouter, withRouter } from 'react-router-dom'; import { connect } from 'react-redux'; @@ -5,26 +6,29 @@ import { Notification } from 'UI'; import { Loader } from 'UI'; import { fetchUserInfo } from 'Duck/user'; import 
withSiteIdUpdater from 'HOCs/withSiteIdUpdater'; -import Login from 'Components/Login/Login'; -import ForgotPassword from 'Components/ForgotPassword/ForgotPassword'; -import UpdatePassword from 'Components/UpdatePassword/UpdatePassword'; -import ClientPure from 'Components/Client/Client'; -import OnboardingPure from 'Components/Onboarding/Onboarding'; -import SessionPure from 'Components/Session/Session'; -import LiveSessionPure from 'Components/Session/LiveSession'; -import AssistPure from 'Components/Assist'; -import BugFinderPure from 'Components/BugFinder/BugFinder'; -import DashboardPure from 'Components/Dashboard/Dashboard'; -import ErrorsPure from 'Components/Errors/Errors'; +const Login = lazy(() => import('Components/Login/Login')); +const ForgotPassword = lazy(() => import('Components/ForgotPassword/ForgotPassword')); +const UpdatePassword = lazy(() => import('Components/UpdatePassword/UpdatePassword')); +const SessionPure = lazy(() => import('Components/Session/Session')); +const LiveSessionPure = lazy(() => import('Components/Session/LiveSession')); +const OnboardingPure = lazy(() => import('Components/Onboarding/Onboarding')); +const ClientPure = lazy(() => import('Components/Client/Client')); +const AssistPure = lazy(() => import('Components/Assist')); +const BugFinderPure = lazy(() => import('Components/BugFinder/BugFinder')); +const DashboardPure = lazy(() => import('Components/Dashboard/NewDashboard')); +const ErrorsPure = lazy(() => import('Components/Errors/Errors')); +const FunnelDetails = lazy(() => import('Components/Funnels/FunnelDetails')); +const FunnelIssueDetails = lazy(() => import('Components/Funnels/FunnelIssueDetails')); +import WidgetViewPure from 'Components/Dashboard/components/WidgetView'; import Header from 'Components/Header/Header'; // import ResultsModal from 'Shared/Results/ResultsModal'; -import FunnelDetails from 'Components/Funnels/FunnelDetails'; -import FunnelIssueDetails from 'Components/Funnels/FunnelIssueDetails'; -import { fetchList as fetchIntegrationVariables } from 'Duck/customField'; +import { fetchList as fetchMetadata } from 'Duck/customField'; import { fetchList as fetchSiteList } from 'Duck/site'; import { fetchList as fetchAnnouncements } from 'Duck/announcements'; import { fetchList as fetchAlerts } from 'Duck/alerts'; import { fetchWatchdogStatus } from 'Duck/watchdogs'; +import { dashboardService } from "App/services"; +import { withStore } from 'App/mstore' import APIClient from './api_client'; import * as routes from './routes'; @@ -32,9 +36,12 @@ import { OB_DEFAULT_TAB } from 'App/routes'; import Signup from './components/Signup/Signup'; import { fetchTenants } from 'Duck/user'; import { setSessionPath } from 'Duck/sessions'; +import { ModalProvider } from './components/Modal'; +import ModalRoot from './components/Modal/ModalRoot'; const BugFinder = withSiteIdUpdater(BugFinderPure); const Dashboard = withSiteIdUpdater(DashboardPure); +const WidgetView = withSiteIdUpdater(WidgetViewPure); const Session = withSiteIdUpdater(SessionPure); const LiveSession = withSiteIdUpdater(LiveSessionPure); const Assist = withSiteIdUpdater(AssistPure); @@ -46,7 +53,15 @@ const FunnelIssue = withSiteIdUpdater(FunnelIssueDetails); const withSiteId = routes.withSiteId; const withObTab = routes.withObTab; +const METRICS_PATH = routes.metrics(); +const METRICS_DETAILS = routes.metricDetails(); + const DASHBOARD_PATH = routes.dashboard(); +const DASHBOARD_SELECT_PATH = routes.dashboardSelected(); +const DASHBOARD_METRIC_CREATE_PATH = 
routes.dashboardMetricCreate(); +const DASHBOARD_METRIC_DETAILS_PATH = routes.dashboardMetricDetails(); + +// const WIDGET_PATAH = routes.dashboardMetric(); const SESSIONS_PATH = routes.sessions(); const ASSIST_PATH = routes.assist(); const ERRORS_PATH = routes.errors(); @@ -62,9 +77,10 @@ const CLIENT_PATH = routes.client(); const ONBOARDING_PATH = routes.onboarding(); const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); +@withStore @withRouter @connect((state) => { - const siteId = state.getIn([ 'user', 'siteId' ]); + const siteId = state.getIn([ 'site', 'siteId' ]); const jwt = state.get('jwt'); const changePassword = state.getIn([ 'user', 'account', 'changePassword' ]); const userInfoLoading = state.getIn([ 'user', 'fetchUserInfoRequest', 'loading' ]); @@ -72,7 +88,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); jwt, siteId, changePassword, - sites: state.getIn([ 'user', 'client', 'sites' ]), + sites: state.getIn([ 'site', 'list' ]), isLoggedIn: jwt !== null && !changePassword, loading: siteId === null || userInfoLoading, email: state.getIn([ 'user', 'account', 'email' ]), @@ -87,7 +103,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); fetchUserInfo, fetchTenants, setSessionPath, - fetchIntegrationVariables, + fetchMetadata, fetchSiteList, fetchAnnouncements, fetchAlerts, @@ -108,15 +124,18 @@ class Router extends React.Component { fetchInitialData = () => { Promise.all([ this.props.fetchUserInfo().then(() => { - this.props.fetchIntegrationVariables() - }), - this.props.fetchSiteList().then(() => { - setTimeout(() => { - this.props.fetchAnnouncements(); - this.props.fetchAlerts(); - this.props.fetchWatchdogStatus(); - }, 100); - }), + this.props.fetchSiteList().then(() => { + const { mstore } = this.props + mstore.initClient(); + + setTimeout(() => { + this.props.fetchMetadata() + this.props.fetchAnnouncements(); + this.props.fetchAlerts(); + this.props.fetchWatchdogStatus(); + }, 100); + }) + }) ]) } @@ -153,54 +172,70 @@ class Router extends React.Component { {!hideHeader &&
} - - - - { - const client = new APIClient(jwt); - switch (location.pathname) { - case '/integrations/slack': - client.post('integrations/slack/add', { - code: location.search.split('=')[ 1 ], - state: tenantId, - }); - break; + }> + + + + + + { + const client = new APIClient(jwt); + switch (location.pathname) { + case '/integrations/slack': + client.post('integrations/slack/add', { + code: location.search.split('=')[ 1 ], + state: tenantId, + }); + break; + } + return ; } - return ; } - } - /> - { onboarding && - - } - { siteIdList.length === 0 && - - } - - - - - - - - - - } /> - { routes.redirects.map(([ fr, to ]) => ( - - )) } - + /> + { onboarding && + + } + {/* { siteIdList.length === 0 && + + } */} + + {/* DASHBOARD and Metrics */} + + + + + + + + + + + + + + + + } /> + { routes.redirects.map(([ fr, to ]) => ( + + )) } + + + + + + : + }> + + + + { !existingTenant && } + - : - - - - { !existingTenant && } - - ; + ; } } diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index a42f19468..98a1f4dfd 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -1,5 +1,4 @@ import store from 'App/store'; - import { queried } from './routes'; const siteIdRequiredPaths = [ @@ -24,6 +23,8 @@ const siteIdRequiredPaths = [ '/assist', '/heatmaps', '/custom_metrics', + '/dashboards', + '/metrics' // '/custom_metrics/sessions', ]; @@ -55,7 +56,7 @@ export const clean = (obj, forbidenValues = [ undefined, '' ]) => { export default class APIClient { constructor() { const jwt = store.getState().get('jwt'); - const siteId = store.getState().getIn([ 'user', 'siteId' ]); + const siteId = store.getState().getIn([ 'site', 'siteId' ]); this.init = { headers: { Accept: 'application/json', @@ -68,12 +69,16 @@ export default class APIClient { this.siteId = siteId; } - fetch(path, params, options = { clean: true }) { + fetch(path, params, options = { clean: true }) { if (params !== undefined) { const cleanedParams = options.clean ? clean(params) : params; this.init.body = JSON.stringify(cleanedParams); } + if (this.init.method === 'GET') { + delete this.init.body; + } + let fetch = window.fetch; diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html index a9d4b0f62..03300b45c 100644 --- a/frontend/app/assets/index.html +++ b/frontend/app/assets/index.html @@ -12,6 +12,7 @@ +

Loading...

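A note on the `api_client.js` hunk above: the Fetch standard rejects GET/HEAD requests that carry a body (browsers throw a `TypeError`), and since `APIClient` reuses a single `init` object across calls, a JSON body left over from an earlier POST would break every subsequent GET. A minimal sketch of the guard, assuming `init.method` defaults to `GET` and is flipped by `get()`/`post()` helpers not shown in the diff:

```js
// Simplified APIClient showing only the body-vs-GET interaction fixed above.
class APIClient {
  constructor(jwt) {
    this.init = {
      method: 'GET', // assumed default; helper methods would override this
      headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
    };
    if (jwt) this.init.headers.Authorization = `Bearer ${jwt}`;
  }

  fetch(path, params) {
    if (params !== undefined) {
      this.init.body = JSON.stringify(params); // a stale body can linger here
    }
    // GET/HEAD requests must not carry a body, so drop any leftover one.
    if (this.init.method === 'GET') {
      delete this.init.body;
    }
    return window.fetch(path, this.init);
  }
}
```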
diff --git a/frontend/app/components/Alerts/AlertFormModal/AlertFormModal.tsx b/frontend/app/components/Alerts/AlertFormModal/AlertFormModal.tsx index 74a2552f7..d1459eb6b 100644 --- a/frontend/app/components/Alerts/AlertFormModal/AlertFormModal.tsx +++ b/frontend/app/components/Alerts/AlertFormModal/AlertFormModal.tsx @@ -45,7 +45,7 @@ function AlertFormModal(props: Props) { const onDelete = async (instance) => { if (await confirm({ header: 'Confirm', - confirmButton: 'Yes, Delete', + confirmButton: 'Yes, delete', confirmation: `Are you sure you want to permanently delete this alert?` })) { props.remove(instance.alertId).then(() => { diff --git a/frontend/app/components/Alerts/Alerts.js b/frontend/app/components/Alerts/Alerts.js index aaa99b7a2..afe161aee 100644 --- a/frontend/app/components/Alerts/Alerts.js +++ b/frontend/app/components/Alerts/Alerts.js @@ -32,7 +32,7 @@ const Alerts = props => { const onDelete = async (instance) => { if (await confirm({ header: 'Confirm', - confirmButton: 'Yes, Delete', + confirmButton: 'Yes, delete', confirmation: `Are you sure you want to permanently delete this alert?` })) { props.remove(instance.alertId).then(() => { diff --git a/frontend/app/components/Alerts/Notifications/Notifications.js b/frontend/app/components/Alerts/Notifications/Notifications.js index a30ad824f..b4dd055a0 100644 --- a/frontend/app/components/Alerts/Notifications/Notifications.js +++ b/frontend/app/components/Alerts/Notifications/Notifications.js @@ -113,7 +113,7 @@ class Notifications extends React.Component { diff --git a/frontend/app/components/Announcements/Announcements.js b/frontend/app/components/Announcements/Announcements.js index 733b4ce37..6252e4d79 100644 --- a/frontend/app/components/Announcements/Announcements.js +++ b/frontend/app/components/Announcements/Announcements.js @@ -71,7 +71,7 @@ class Announcements extends React.Component { @@ -96,6 +96,6 @@ class Announcements extends React.Component { export default connect(state => ({ announcements: state.getIn(['announcements', 'list']), loading: state.getIn(['announcements', 'fetchList', 'loading']), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), }), { fetchList, setLastRead })(Announcements); \ No newline at end of file diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx index 36bc0765b..b57f5ca35 100644 --- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx +++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx @@ -1,3 +1,4 @@ +//@ts-nocheck import React, { useState, FC, useEffect } from 'react' import VideoContainer from '../components/VideoContainer' import { Icon, Popup, Button } from 'UI' diff --git a/frontend/app/components/Assist/components/AssistActions/AassistActions.css b/frontend/app/components/Assist/components/AssistActions/AassistActions.css index 8a5758d90..77d9b7c81 100644 --- a/frontend/app/components/Assist/components/AssistActions/AassistActions.css +++ b/frontend/app/components/Assist/components/AssistActions/AassistActions.css @@ -2,3 +2,10 @@ opacity: 0.5; pointer-events: none; } + +.divider { + width: 1px; + height: 49px; + margin: 0 10px; + background-color: $gray-light; +} \ No newline at end of file diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index 58eadd472..88713a6ae 100644 --- 
a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -5,7 +5,7 @@ import cn from 'classnames' import { toggleChatWindow } from 'Duck/sessions'; import { connectPlayer } from 'Player/store'; import ChatWindow from '../../ChatWindow'; -import { callPeer, requestReleaseRemoteControl } from 'Player' +import { callPeer, requestReleaseRemoteControl, toggleAnnotation } from 'Player' import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager'; import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream'; import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream'; @@ -23,7 +23,7 @@ function onReject() { } function onError(e) { - toast.error(e); + toast.error(typeof e === 'string' ? e : e.message); } @@ -31,13 +31,14 @@ interface Props { userId: String, toggleChatWindow: (state) => void, calling: CallingState, + annotating: boolean, peerConnectionStatus: ConnectionStatus, remoteControlStatus: RemoteControlStatus, hasPermission: boolean, isEnterprise: boolean, } -function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlStatus, hasPermission, isEnterprise }: Props) { +function AssistActions({ toggleChatWindow, userId, calling, annotating, peerConnectionStatus, remoteControlStatus, hasPermission, isEnterprise }: Props) { const [ incomeStream, setIncomeStream ] = useState(null); const [ localStream, setLocalStream ] = useState(null); const [ callObject, setCallObject ] = useState<{ end: ()=>void } | null >(null); @@ -81,6 +82,23 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus return (
+ {onCall && ( + <> +
+ +
+
+ + )}
- +
+
{ export default con(connectPlayer(state => ({ calling: state.calling, + annotating: state.annotating, remoteControlStatus: state.remoteControl, peerConnectionStatus: state.peerConnectionStatus, }))(AssistActions)) diff --git a/frontend/app/components/Assist/components/SessionList/SessionList.tsx b/frontend/app/components/Assist/components/SessionList/SessionList.tsx index 73c7a3a7f..32c267588 100644 --- a/frontend/app/components/Assist/components/SessionList/SessionList.tsx +++ b/frontend/app/components/Assist/components/SessionList/SessionList.tsx @@ -8,11 +8,15 @@ interface Props { loading: boolean, list: any, session: any, - fetchLiveList: () => void, + fetchLiveList: (params: any) => void, } function SessionList(props: Props) { useEffect(() => { - props.fetchLiveList(); + const params: any = {} + if (props.session.userId) { + params.userId = props.session.userId + } + props.fetchLiveList(params); }, []) return ( diff --git a/frontend/app/components/BugFinder/AutoComplete/AutoComplete.js b/frontend/app/components/BugFinder/AutoComplete/AutoComplete.js index 6b90786b7..b528e7bb9 100644 --- a/frontend/app/components/BugFinder/AutoComplete/AutoComplete.js +++ b/frontend/app/components/BugFinder/AutoComplete/AutoComplete.js @@ -114,7 +114,7 @@ class AutoComplete extends React.PureComponent { render() { const { ddOpen, query, loading, values } = this.state; - const { + const { optionMapping = defaultOptionMapping, valueToText = defaultValueToText, placeholder = 'Type to search...', diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js index 326a1e78e..3ce09d23b 100644 --- a/frontend/app/components/BugFinder/BugFinder.js +++ b/frontend/app/components/BugFinder/BugFinder.js @@ -20,7 +20,7 @@ import { LAST_7_DAYS } from 'Types/app/period'; import { resetFunnel } from 'Duck/funnels'; import { resetFunnelFilters } from 'Duck/funnelFilters' import NoSessionsMessage from 'Shared/NoSessionsMessage'; -import TrackerUpdateMessage from 'Shared/TrackerUpdateMessage'; +// import TrackerUpdateMessage from 'Shared/TrackerUpdateMessage'; import SessionSearch from 'Shared/SessionSearch'; import MainSearchBar from 'Shared/MainSearchBar'; import { clearSearch, fetchSessions } from 'Duck/search'; @@ -53,7 +53,7 @@ const allowedQueryKeys = [ sources: state.getIn([ 'customFields', 'sources' ]), filterValues: state.get('filterValues'), favoriteList: state.getIn([ 'sessions', 'favoriteList' ]), - currentProjectId: state.getIn([ 'user', 'siteId' ]), + currentProjectId: state.getIn([ 'site', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), watchdogs: state.getIn(['watchdogs', 'list']), activeFlow: state.getIn([ 'filters', 'activeFlow' ]), @@ -130,7 +130,7 @@ export default class BugFinder extends React.PureComponent { />
-          <TrackerUpdateMessage />
+          {/* <TrackerUpdateMessage /> */}
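The Assist `SessionList.tsx` hunk a few hunks up narrows the live-session fetch: instead of requesting every live session, the component forwards the watched session's `userId`, presumably so the panel lists only that user's other live tabs. A sketch of the pattern; `session` and `fetchLiveList` mirror the props in the diff, while the thunk behind `fetchLiveList` is assumed to serialize `params` into the query string:

```js
// Sketch of the narrowed live-session fetch from SessionList.tsx.
import React, { useEffect } from 'react';

function SessionList({ session, fetchLiveList }) {
  useEffect(() => {
    const params = {};
    // Anonymous sessions have no userId; add the filter only when it is set.
    if (session.userId) {
      params.userId = session.userId;
    }
    fetchLiveList(params);
  }, []);

  return null; // list rendering is unchanged by this hunk and omitted here
}
```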
diff --git a/frontend/app/components/BugFinder/CustomFilters/FilterModal.js b/frontend/app/components/BugFinder/CustomFilters/FilterModal.js index c5ab474be..5bd536de5 100644 --- a/frontend/app/components/BugFinder/CustomFilters/FilterModal.js +++ b/frontend/app/components/BugFinder/CustomFilters/FilterModal.js @@ -161,8 +161,6 @@ export default class FilterModal extends React.PureComponent { const staticFilters = preloadedFilters .filter(({ value, actualValue }) => !this.props.loading && this.test(actualValue || value)) - // console.log('filteredList', filteredList); - return (!displayed ? null :
{ loading && diff --git a/frontend/app/components/BugFinder/EventFilter/EventEditor.js b/frontend/app/components/BugFinder/EventFilter/EventEditor.js index def086416..29488a231 100644 --- a/frontend/app/components/BugFinder/EventFilter/EventEditor.js +++ b/frontend/app/components/BugFinder/EventFilter/EventEditor.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { DNDSource, DNDTarget } from 'Components/hocs/dnd'; +// import { DNDSource, DNDTarget } from 'Components/hocs/dnd'; import Event, { TYPES } from 'Types/filter/event'; import { operatorOptions } from 'Types/filter'; import { editEvent, removeEvent, clearEvents, applyFilter } from 'Duck/filters'; @@ -25,8 +25,8 @@ const getLabel = ({ type }) => { return getPlaceholder({ type }); }; -@DNDTarget('event') -@DNDSource('event') +// @DNDTarget('event') +// @DNDSource('event') @connect(state => ({ isLastEvent: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 1, }), { editEvent, removeEvent, clearEvents, applyFilter }) diff --git a/frontend/app/components/BugFinder/EventFilter/EventFilter.js b/frontend/app/components/BugFinder/EventFilter/EventFilter.js index 5bc1d1b32..5adc5a42e 100644 --- a/frontend/app/components/BugFinder/EventFilter/EventFilter.js +++ b/frontend/app/components/BugFinder/EventFilter/EventFilter.js @@ -1,6 +1,6 @@ import { connect } from 'react-redux'; import { Input } from 'semantic-ui-react'; -import { DNDContext } from 'Components/hocs/dnd'; +// import { DNDContext } from 'Components/hocs/dnd'; import { addEvent, applyFilter, moveEvent, clearEvents, edit, addCustomFilter, addAttribute, setSearchQuery, setActiveFlow, setFilterOption @@ -45,7 +45,7 @@ import SaveFilterButton from 'Shared/SaveFilterButton'; setBlink, edit, }) -@DNDContext +// @DNDContext export default class EventFilter extends React.PureComponent { state = { search: '', showFilterModal: false, showPlacehoder: true } fetchEventList = debounce(this.props.fetchEventList, 500) diff --git a/frontend/app/components/BugFinder/SessionCaptureRate/SessionCaptureRate.js b/frontend/app/components/BugFinder/SessionCaptureRate/SessionCaptureRate.js index 4c2b41218..f545bcacd 100644 --- a/frontend/app/components/BugFinder/SessionCaptureRate/SessionCaptureRate.js +++ b/frontend/app/components/BugFinder/SessionCaptureRate/SessionCaptureRate.js @@ -72,7 +72,7 @@ const SessionCaptureRate = props => { } export default connect(state => ({ - currentProjectId: state.getIn([ 'user', 'siteId' ]), + currentProjectId: state.getIn([ 'site', 'siteId' ]), captureRate: state.getIn(['watchdogs', 'captureRate']), loading: state.getIn(['watchdogs', 'savingCaptureRate', 'loading']), }), { diff --git a/frontend/app/components/BugFinder/SessionFlowList/SessionFlowList.js b/frontend/app/components/BugFinder/SessionFlowList/SessionFlowList.js index c7dca4cf8..f4962573a 100644 --- a/frontend/app/components/BugFinder/SessionFlowList/SessionFlowList.js +++ b/frontend/app/components/BugFinder/SessionFlowList/SessionFlowList.js @@ -11,7 +11,7 @@ function SessionFlowList({ activeTab, savedFilters, loading }) { diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js index f5152222a..64bf722f4 100644 --- a/frontend/app/components/BugFinder/SessionList/SessionList.js +++ b/frontend/app/components/BugFinder/SessionList/SessionList.js @@ -1,12 +1,12 @@ import { connect } from 'react-redux'; -import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI'; +import { Loader, 
NoContent, Button, Pagination } from 'UI'; import { applyFilter, addAttribute, addEvent } from 'Duck/filters'; -import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search'; +import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage, setScrollPosition } from 'Duck/search'; import SessionItem from 'Shared/SessionItem'; import SessionListHeader from './SessionListHeader'; import { FilterKey } from 'Types/filter/filterType'; -const ALL = 'all'; +// const ALL = 'all'; const PER_PAGE = 10; const AUTOREFRESH_INTERVAL = 3 * 60 * 1000; var timeoutId; @@ -21,6 +21,7 @@ var timeoutId; filters: state.getIn([ 'search', 'instance', 'filters' ]), metaList: state.getIn(['customFields', 'list']).map(i => i.key), currentPage: state.getIn([ 'search', 'currentPage' ]), + scrollY: state.getIn([ 'search', 'scrollY' ]), lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]), }), { applyFilter, @@ -29,24 +30,15 @@ var timeoutId; fetchSessions, addFilterByKeyAndValue, updateCurrentPage, + setScrollPosition, }) export default class SessionList extends React.PureComponent { - state = { - showPages: 1, - } + constructor(props) { super(props); this.timeout(); } - componentDidUpdate(prevProps) { - if (prevProps.loading && !this.props.loading) { - this.setState({ showPages: 1 }); - } - } - - addPage = () => this.setState({ showPages: this.state.showPages + 1 }) - onUserClick = (userId, userAnonymousId) => { if (userId) { this.props.addFilterByKeyAndValue(FilterKey.USERID, userId); @@ -76,17 +68,21 @@ export default class SessionList extends React.PureComponent { } componentWillUnmount() { + this.props.setScrollPosition(window.scrollY) clearTimeout(timeoutId) } - + componentDidMount() { + const { scrollY } = this.props; + window.scrollTo(0, scrollY); + } renderActiveTabContent(list) { const { loading, filters, - onMenuItemClick, - allList, + // onMenuItemClick, + // allList, activeTab, metaList, currentPage, @@ -95,19 +91,17 @@ export default class SessionList extends React.PureComponent { } = this.props; const _filterKeys = filters.map(i => i.key); const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID); - const { showPages } = this.state; - const displayedCount = Math.min(showPages * PER_PAGE, list.size); return (
Please try changing your search parameters.
- {allList.size > 0 && ( + {/* {allList.size > 0 && (
However, we found other sessions based on your search parameters.
@@ -117,7 +111,7 @@ export default class SessionList extends React.PureComponent { >See All
- )} + )} */}
} > @@ -148,23 +142,23 @@ export default class SessionList extends React.PureComponent { render() { const { activeTab, allList, total } = this.props; - var filteredList; + // var filteredList; - if (activeTab.type !== ALL && activeTab.type !== 'bookmark' && activeTab.type !== 'live') { // Watchdog sessions - filteredList = allList.filter(session => activeTab.fits(session)) - } else { - filteredList = allList - } + // if (activeTab.type !== ALL && activeTab.type !== 'bookmark' && activeTab.type !== 'live') { // Watchdog sessions + // filteredList = allList.filter(session => activeTab.fits(session)) + // } else { + // filteredList = allList + // } - if (activeTab.type === 'bookmark') { - filteredList = filteredList.filter(item => item.favorite) - } - const _total = activeTab.type === 'all' ? total : filteredList.size + // if (activeTab.type === 'bookmark') { + // filteredList = filteredList.filter(item => item.favorite) + // } + // const _total = activeTab.type === 'all' ? total : allList.size return (
- - { this.renderActiveTabContent(filteredList) } + + { this.renderActiveTabContent(allList) }
); } diff --git a/frontend/app/components/Client/CustomFields/CustomFields.js b/frontend/app/components/Client/CustomFields/CustomFields.js index 081f11a58..b46994f0b 100644 --- a/frontend/app/components/Client/CustomFields/CustomFields.js +++ b/frontend/app/components/Client/CustomFields/CustomFields.js @@ -13,7 +13,7 @@ import { confirm } from 'UI/Confirmation'; fields: state.getIn(['customFields', 'list']).sortBy(i => i.index), field: state.getIn(['customFields', 'instance']), loading: state.getIn(['customFields', 'fetchRequest', 'loading']), - sites: state.getIn([ 'user', 'client', 'sites' ]), + sites: state.getIn([ 'site', 'list' ]), errors: state.getIn([ 'customFields', 'saveRequest', 'errors' ]), }), { init, diff --git a/frontend/app/components/Client/Integrations/IntegrationForm.js b/frontend/app/components/Client/Integrations/IntegrationForm.js index 3481068de..239958233 100644 --- a/frontend/app/components/Client/Integrations/IntegrationForm.js +++ b/frontend/app/components/Client/Integrations/IntegrationForm.js @@ -4,8 +4,8 @@ import SiteDropdown from 'Shared/SiteDropdown'; import { save, init, edit, remove, fetchList } from 'Duck/integrations/actions'; @connect((state, { name, customPath }) => ({ - sites: state.getIn([ 'user', 'client', 'sites' ]), - initialSiteId: state.getIn([ 'user', 'siteId' ]), + sites: state.getIn([ 'site', 'list' ]), + initialSiteId: state.getIn([ 'site', 'siteId' ]), list: state.getIn([ name, 'list' ]), config: state.getIn([ name, 'instance']), saving: state.getIn([ customPath || name, 'saveRequest', 'loading']), diff --git a/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js b/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js index 16586fd1d..4eb16f868 100644 --- a/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js +++ b/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js @@ -22,7 +22,7 @@ class SlackAddForm extends React.PureComponent { remove = async (id) => { if (await confirm({ header: 'Confirm', - confirmButton: 'Yes, Delete', + confirmButton: 'Yes, delete', confirmation: `Are you sure you want to permanently delete this channel?` })) { this.props.remove(id); diff --git a/frontend/app/components/Client/Sites/NewSiteForm.js b/frontend/app/components/Client/Sites/NewSiteForm.js index 011ed8b01..4195c8c63 100644 --- a/frontend/app/components/Client/Sites/NewSiteForm.js +++ b/frontend/app/components/Client/Sites/NewSiteForm.js @@ -1,7 +1,8 @@ import { connect } from 'react-redux'; import { Input, Button, Label } from 'UI'; import { save, edit, update , fetchList } from 'Duck/site'; -import { pushNewSite, setSiteId } from 'Duck/user'; +import { pushNewSite } from 'Duck/user'; +import { setSiteId } from 'Duck/site'; import { withRouter } from 'react-router-dom'; import styles from './siteForm.css'; @@ -16,7 +17,7 @@ import styles from './siteForm.css'; update, pushNewSite, fetchList, - setSiteId + setSiteId }) @withRouter export default class NewSiteForm extends React.PureComponent { @@ -37,14 +38,17 @@ export default class NewSiteForm extends React.PureComponent { }) } else { this.props.save(this.props.site).then(() => { - const { sites } = this.props; - const site = sites.last(); - - this.props.pushNewSite(site) - if (!pathname.includes('/client')) { - this.props.setSiteId(site.id) - } - this.props.onClose(null, site) + this.props.fetchList().then(() => { + const { sites } = this.props; + const site = sites.last(); + if (!pathname.includes('/client')) { + 
console.log('site', site) + this.props.setSiteId(site.get('id')) + } + this.props.onClose(null, site) + }) + + // this.props.pushNewSite(site) }); } } @@ -58,17 +62,17 @@ export default class NewSiteForm extends React.PureComponent { const { site, loading } = this.props; return (
-
+
- - -
+ + +
- { this.state.existsError && -
- { "Site exists already. Please choose another one." } -
- } -
-
- ); + { this.state.existsError && +
+ { "Site exists already. Please choose another one." } +
+ } +
+ + ); } } \ No newline at end of file diff --git a/frontend/app/components/Client/Webhooks/Webhooks.js b/frontend/app/components/Client/Webhooks/Webhooks.js index 100887285..150ed05a6 100644 --- a/frontend/app/components/Client/Webhooks/Webhooks.js +++ b/frontend/app/components/Client/Webhooks/Webhooks.js @@ -59,7 +59,7 @@ class Webhooks extends React.PureComponent { title="No webhooks available." size="small" show={ noSlackWebhooks.size === 0 } - icon + animatedIcon="no-results" >
{ noSlackWebhooks.map(webhook => ( diff --git a/frontend/app/components/Client/client.css b/frontend/app/components/Client/client.css index d9406e4d8..8e69458ef 100644 --- a/frontend/app/components/Client/client.css +++ b/frontend/app/components/Client/client.css @@ -18,7 +18,7 @@ & .tabContent { background-color: white; padding: 25px; - margin-top: -30px; + /* margin-top: -30px; */ margin-right: -20px; width: 100%; } diff --git a/frontend/app/components/Dashboard/Dashboard.js b/frontend/app/components/Dashboard/Dashboard.js index 2b71ef253..9d84a8779 100644 --- a/frontend/app/components/Dashboard/Dashboard.js +++ b/frontend/app/components/Dashboard/Dashboard.js @@ -212,8 +212,7 @@ export default class Dashboard extends React.PureComponent { show={ noWidgets } title="You haven't added any insights widgets!" subtext="Add new to keep track of Processed Sessions, Application Activity, Errors and lot more." - icon - empty + animatedIcon="empty-state" > { export default connect(state => ({ period: state.getIn([ 'dashboard', 'period' ]), platform: state.getIn([ 'dashboard', 'platform' ]), - currentProjectId: state.getIn([ 'user', 'siteId' ]), + currentProjectId: state.getIn([ 'site', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), }), { setPeriod, setPlatform })(DashboardHeader) diff --git a/frontend/app/components/Dashboard/NewDashboard.tsx b/frontend/app/components/Dashboard/NewDashboard.tsx new file mode 100644 index 000000000..9e4168cfc --- /dev/null +++ b/frontend/app/components/Dashboard/NewDashboard.tsx @@ -0,0 +1,38 @@ +import React, { useEffect } from 'react'; +import { useObserver } from "mobx-react-lite"; +import { useStore } from 'App/mstore'; +import { withRouter } from 'react-router-dom'; +import DashboardSideMenu from './components/DashboardSideMenu'; +import { Loader } from 'UI'; +import DashboardRouter from './components/DashboardRouter'; +import cn from 'classnames'; + +function NewDashboard(props) { + const { history, match: { params: { siteId, dashboardId, metricId } } } = props; + const { dashboardStore } = useStore(); + const loading = useObserver(() => dashboardStore.isLoading); + const isMetricDetails = history.location.pathname.includes('/metrics/') || history.location.pathname.includes('/metric/'); + + useEffect(() => { + dashboardStore.fetchList().then((resp) => { + if (parseInt(dashboardId) > 0) { + dashboardStore.selectDashboardById(dashboardId); + } + }); + }, [siteId]); + + return useObserver(() => ( + +
+
+ +
+
+ +
+
+
+ )); +} + +export default withRouter(NewDashboard); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/SideMenu/SideMenuSection.js b/frontend/app/components/Dashboard/SideMenu/SideMenuSection.js index e26e24da2..494ce7128 100644 --- a/frontend/app/components/Dashboard/SideMenu/SideMenuSection.js +++ b/frontend/app/components/Dashboard/SideMenu/SideMenuSection.js @@ -22,7 +22,7 @@ function SideMenuSection({ title, items, onItemClick, setShowAlerts, siteId }) { )}
-
+
({ - siteId: state.getIn([ 'user', 'siteId' ]) + siteId: state.getIn([ 'site', 'siteId' ]) }), { setShowAlerts })(SideMenuSection); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetriLineChart/CustomMetriLineChart.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetriLineChart/CustomMetriLineChart.tsx index ffbbc6b88..198afb088 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetriLineChart/CustomMetriLineChart.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetriLineChart/CustomMetriLineChart.tsx @@ -6,20 +6,21 @@ import { LineChart, Line, Legend } from 'recharts'; interface Props { data: any; params: any; - seriesMap: any; + // seriesMap: any; colors: any; onClick?: (event, index) => void; } function CustomMetriLineChart(props: Props) { - const { data, params, seriesMap, colors, onClick = () => null } = props; + const { data = { chart: [], namesMap: [] }, params, colors, onClick = () => null } = props; + return ( - { seriesMap.map((key, index) => ( + { Array.isArray(data.namesMap) && data.namesMap.map((key, index) => ( ))} diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/CustomMetricOverviewChart.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/CustomMetricOverviewChart.tsx new file mode 100644 index 000000000..1f65e1c81 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/CustomMetricOverviewChart.tsx @@ -0,0 +1,75 @@ +import React from 'react' +import { Styles } from '../../common'; +import { AreaChart, ResponsiveContainer, XAxis, YAxis, CartesianGrid, Area, Tooltip } from 'recharts'; +import { LineChart, Line, Legend } from 'recharts'; +import cn from 'classnames'; +import CountBadge from '../../common/CountBadge'; +import { numberWithCommas } from 'App/utils'; + +interface Props { + data: any; + // onClick?: (event, index) => void; +} +function CustomMetricOverviewChart(props: Props) { + const { data } = props; + const gradientDef = Styles.gradientDef(); + + return ( +
+
+
+
+
+ +
+
+ + + {gradientDef} + + + + + + +
+ ) +} + +export default CustomMetricOverviewChart + + +const countView = (avg, unit) => { + if (unit === 'mb') { + if (!avg) return 0; + const count = Math.trunc(avg / 1024 / 1024); + return numberWithCommas(count); + } + if (unit === 'min') { + if (!avg) return 0; + const count = Math.trunc(avg); + return numberWithCommas(count > 1000 ? count +'k' : count); + } + return avg ? numberWithCommas(avg): 0; + } \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/index.ts b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/index.ts new file mode 100644 index 000000000..2aa2ad492 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart/index.ts @@ -0,0 +1 @@ +export { default } from './CustomMetricOverviewChart'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPercentage/CustomMetricPercentage.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPercentage/CustomMetricPercentage.tsx index 177dccf9a..ffce73783 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPercentage/CustomMetricPercentage.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPercentage/CustomMetricPercentage.tsx @@ -12,7 +12,7 @@ function CustomMetriPercentage(props: Props) { return (
{numberWithCommas(data.count)}
-
{`${data.previousCount} ( ${data.countProgress}% )`}
+
{`${parseInt(data.previousCount || 0)} ( ${parseInt(data.countProgress || 0).toFixed(1)}% )`}
from previous period.
) diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart/CustomMetricPieChart.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart/CustomMetricPieChart.tsx index e48dd20dd..6d1cd01e4 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart/CustomMetricPieChart.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart/CustomMetricPieChart.tsx @@ -1,3 +1,4 @@ +//@ts-nocheck import React from 'react' import { ResponsiveContainer, Tooltip } from 'recharts'; import { PieChart, Pie, Cell } from 'recharts'; @@ -35,8 +36,7 @@ function CustomMetricPieChart(props: Props) { } } return ( -
- + { - const RADIAN = Math.PI / 180; - let radius1 = 15 + innerRadius + (outerRadius - innerRadius); - let radius2 = innerRadius + (outerRadius - innerRadius); - let x2 = cx + radius1 * Math.cos(-midAngle * RADIAN); - let y2 = cy + radius1 * Math.sin(-midAngle * RADIAN); - let x1 = cx + radius2 * Math.cos(-midAngle * RADIAN); - let y1 = cy + radius2 * Math.sin(-midAngle * RADIAN); + cx, + cy, + midAngle, + innerRadius, + outerRadius, + value, + }) => { + const RADIAN = Math.PI / 180; + let radius1 = 15 + innerRadius + (outerRadius - innerRadius); + let radius2 = innerRadius + (outerRadius - innerRadius); + let x2 = cx + radius1 * Math.cos(-midAngle * RADIAN); + let y2 = cy + radius1 * Math.sin(-midAngle * RADIAN); + let x1 = cx + radius2 * Math.cos(-midAngle * RADIAN); + let y1 = cy + radius2 * Math.sin(-midAngle * RADIAN); - const percentage = value * 100 / data.values.reduce((a, b) => a + b.sessionCount, 0); - - if (percentage<3){ - return null; - } - - return( - - ) - }} - label={({ - cx, - cy, - midAngle, - innerRadius, - outerRadius, - value, - index - }) => { - const RADIAN = Math.PI / 180; - let radius = 20 + innerRadius + (outerRadius - innerRadius); - let x = cx + radius * Math.cos(-midAngle * RADIAN); - let y = cy + radius * Math.sin(-midAngle * RADIAN); - const percentage = (value / data.values.reduce((a, b) => a + b.sessionCount, 0)) * 100; - let name = data.values[index].name || 'Unidentified'; - name = name.length > 20 ? name.substring(0, 20) + '...' : name; - if (percentage<3){ - return null; - } - return ( - cx ? "start" : "end"} - dominantBaseline="central" - fill='#666' - > - {name || 'Unidentified'} {numberWithCommas(value)} - - ); - }} - // label={({ - // cx, - // cy, - // midAngle, - // innerRadius, - // outerRadius, - // value, - // index - // }) => { - // const RADIAN = Math.PI / 180; - // const radius = 30 + innerRadius + (outerRadius - innerRadius); - // const x = cx + radius * Math.cos(-midAngle * RADIAN); - // const y = cy + radius * Math.sin(-midAngle * RADIAN); - - // return ( - // cx ? "start" : "end"} - // dominantBaseline="top" - // fontSize={10} - // > - // {data.values[index].name} ({value}) - // - // ); - // }} + const percentage = value * 100 / data.values.reduce((a, b) => a + b.sessionCount, 0); + + if (percentage<3){ + return null; + } + + return( + + ) + }} + label={({ + cx, + cy, + midAngle, + innerRadius, + outerRadius, + value, + index + }) => { + const RADIAN = Math.PI / 180; + let radius = 20 + innerRadius + (outerRadius - innerRadius); + let x = cx + radius * Math.cos(-midAngle * RADIAN); + let y = cy + radius * Math.sin(-midAngle * RADIAN); + const percentage = (value / data.values.reduce((a, b) => a + b.sessionCount, 0)) * 100; + let name = data.values[index].name || 'Unidentified'; + name = name.length > 20 ? name.substring(0, 20) + '...' : name; + if (percentage<3){ + return null; + } + return ( + cx ? "start" : "end"} + dominantBaseline="central" + fill='#666' + > + {name || 'Unidentified'} {numberWithCommas(value)} + + ); + }} > - {data.values.map((entry, index) => ( - - ))} + {data && data.values && data.values.map((entry, index) => ( + + ))} - +
Top 5
-
-
+ ) } diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidget/CustomMetricWidget.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidget/CustomMetricWidget.tsx index c5fd2ad3f..15acd21bb 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidget/CustomMetricWidget.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidget/CustomMetricWidget.tsx @@ -56,29 +56,29 @@ function CustomMetricWidget(props: Props) { const isTable = metric.viewType === 'table'; const isPieChart = metric.viewType === 'pieChart'; - useEffect(() => { - new APIClient()['post'](`/custom_metrics/${metricParams.metricId}/chart`, { ...metricParams, q: metric.name }) - .then(response => response.json()) - .then(({ errors, data }) => { - if (errors) { - console.log('err', errors) - } else { - const namesMap = data - .map(i => Object.keys(i)) - .flat() - .filter(i => i !== 'time' && i !== 'timestamp') - .reduce((unique: any, item: any) => { - if (!unique.includes(item)) { - unique.push(item); - } - return unique; - }, []); + // useEffect(() => { + // new APIClient()['post'](`/custom_metrics/${metricParams.metricId}/chart`, { ...metricParams, q: metric.name }) + // .then(response => response.json()) + // .then(({ errors, data }) => { + // if (errors) { + // console.log('err', errors) + // } else { + // const namesMap = data + // .map(i => Object.keys(i)) + // .flat() + // .filter(i => i !== 'time' && i !== 'timestamp') + // .reduce((unique: any, item: any) => { + // if (!unique.includes(item)) { + // unique.push(item); + // } + // return unique; + // }, []); - setSeriesMap(namesMap); - setData(getChartFormatter(period)(data)); - } - }).finally(() => setLoading(false)); - }, [period]) + // setSeriesMap(namesMap); + // setData(getChartFormatter(period)(data)); + // } + // }).finally(() => setLoading(false)); + // }, [period]) const clickHandlerTable = (filters) => { const activeWidget = { @@ -136,7 +136,7 @@ function CustomMetricWidget(props: Props) { diff --git a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidgetPreview/CustomMetricWidgetPreview.tsx b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidgetPreview/CustomMetricWidgetPreview.tsx index 10936f1de..a1a2534f9 100644 --- a/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidgetPreview/CustomMetricWidgetPreview.tsx +++ b/frontend/app/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidgetPreview/CustomMetricWidgetPreview.tsx @@ -61,27 +61,27 @@ function CustomMetricWidget(props: Props) { setLoading(true); // fetch new data for the widget preview - new APIClient()['post']('/custom_metrics/try', { ...metricParams, ...metric.toSaveData() }) - .then(response => response.json()) - .then(({ errors, data }) => { - if (errors) { - console.log('err', errors) - } else { - const namesMap = data - .map(i => Object.keys(i)) - .flat() - .filter(i => i !== 'time' && i !== 'timestamp') - .reduce((unique: any, item: any) => { - if (!unique.includes(item)) { - unique.push(item); - } - return unique; - }, []); + // new APIClient()['post']('/custom_metrics/try', { ...metricParams, ...metric.toSaveData() }) + // .then(response => response.json()) + // .then(({ errors, data }) => { + // if (errors) { + // console.log('err', errors) + // } else { + // const namesMap = data + // .map(i => Object.keys(i)) + // .flat() + // .filter(i => i !== 'time' && i !== 
'timestamp') + // .reduce((unique: any, item: any) => { + // if (!unique.includes(item)) { + // unique.push(item); + // } + // return unique; + // }, []); - setSeriesMap(namesMap); - setData(getChartFormatter(period)(data)); - } - }).finally(() => setLoading(false)); + // setSeriesMap(namesMap); + // setData(getChartFormatter(period)(data)); + // } + // }).finally(() => setLoading(false)); }, [metric]) const onDateChange = (changedDates) => { @@ -168,7 +168,7 @@ function CustomMetricWidget(props: Props) { { metric.viewType === 'lineChart' && ( diff --git a/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.css b/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.css index d3d399918..529aa15eb 100644 --- a/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.css +++ b/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.css @@ -1,5 +1,5 @@ .bar { - height: 10px; + height: 5px; background-color: red; width: 100%; border-radius: 3px; diff --git a/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.js b/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.js index 99b37a032..8a09c13d4 100644 --- a/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.js +++ b/frontend/app/components/Dashboard/Widgets/ErrorsPerDomain/Bar.js @@ -10,7 +10,7 @@ const Bar = ({ className = '', width = 0, avg, domain, color }) => { {`${avg}`}
-
{domain}
+
{domain}
) } diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/BreakdownOfLoadedResources.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/BreakdownOfLoadedResources.tsx new file mode 100644 index 000000000..fd38e2a55 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/BreakdownOfLoadedResources.tsx @@ -0,0 +1,48 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function BreakdownOfLoadedResources(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + + + {gradientDef} + + + + + + + + + + + + ); +} + +export default BreakdownOfLoadedResources; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/index.ts new file mode 100644 index 000000000..5770a63d8 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources/index.ts @@ -0,0 +1 @@ +export { default } from './BreakdownOfLoadedResources' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/CPULoad.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/CPULoad.tsx new file mode 100644 index 000000000..53356bf0d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/CPULoad.tsx @@ -0,0 +1,57 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function CPULoad(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "CPU Load (%)" }} + /> + + + + + + ); +} + +export default CPULoad; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/index.ts new file mode 100644 index 000000000..37cec8b40 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CPULoad/index.ts @@ -0,0 +1 @@ +export { default } from './CPULoad' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx new file mode 100644 index 000000000..4e5d0f637 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/CallWithErrors.tsx @@ -0,0 +1,79 @@ +import React from 'react'; +import { Loader, NoContent } from 'UI'; +import { Styles, Table } from '../../common'; +import { getRE } from 'App/utils'; +import ImageInfo from './ImageInfo'; +import MethodType from './MethodType'; +import cn from 'classnames'; +import stl from './callWithErrors.css'; + +const 
cols = [ + { + key: 'method', + title: 'Method', + className: 'text-left', + Component: MethodType, + cellClass: 'ml-2', + width: '8%', + }, + { + key: 'urlHostpath', + title: 'Path', + Component: ImageInfo, + width: '40%', + }, + { + key: 'allRequests', + title: 'Requests', + className: 'text-left', + width: '15%', + }, + { + key: '4xx', + title: '4xx', + className: 'text-left', + width: '15%', + }, + { + key: '5xx', + title: '5xx', + className: 'text-left', + width: '15%', + } +]; + +interface Props { + data: any + metric?: any +} +function CallWithErrors(props: Props) { + const { data, metric } = props; + const [search, setSearch] = React.useState('') + const test = (value = '', query) => getRE(query, 'i').test(value); + const _data = search ? metric.data.chart.filter(i => test(i.urlHostpath, search)) : metric.data.chart; + + const write = ({ target: { name, value } }) => { + setSearch(value) + }; + + return ( + + 
+
+ +
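// A sketch of the search filter wired above, assuming App/utils' getRE builds a
// case-insensitive RegExp from the query (its exact implementation is not in this diff):
const getRESketch = (query: string, flags: string): RegExp => new RegExp(query, flags);
const sampleRows = [{ urlHostpath: '/api/users' }, { urlHostpath: '/static/app.js' }];
const matched = sampleRows.filter(r => getRESketch('users', 'i').test(r.urlHostpath || ''));
// matched -> [{ urlHostpath: '/api/users' }]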
+ + + + ); +} + +export default CallWithErrors; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/Chart.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/Chart.js new file mode 100644 index 000000000..2f406622d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/Chart.js @@ -0,0 +1,16 @@ +import { AreaChart, Area } from 'recharts'; +import { Styles } from '../../common'; + +const Chart = ({ data, compare }) => { + const colors = compare ? Styles.compareColors : Styles.colors; + + return ( + + + + ); +} + +Chart.displayName = 'Chart'; + +export default Chart; diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/ImageInfo.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/ImageInfo.js new file mode 100644 index 000000000..8251bec60 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/ImageInfo.js @@ -0,0 +1,12 @@ +import { Popup, Icon, TextEllipsis } from 'UI'; +import styles from './imageInfo.css'; + +const ImageInfo = ({ data }) => ( +
+ +
+); + +ImageInfo.displayName = 'ImageInfo'; + +export default ImageInfo; diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/MethodType.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/MethodType.js new file mode 100644 index 000000000..ba370b481 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/MethodType.js @@ -0,0 +1,10 @@ +import React from 'react' +import { Label } from 'UI'; + +const MethodType = ({ data }) => { + return ( + + ) +} + +export default MethodType diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/callWithErrors.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/callWithErrors.css new file mode 100644 index 000000000..bc37a3991 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/callWithErrors.css @@ -0,0 +1,22 @@ +.topActions { + position: absolute; + top: -4px; + right: 50px; + display: flex; + justify-content: flex-end; +} + +.searchField { + padding: 4px 5px; + border-bottom: dotted thin $gray-light; + border-radius: 3px; + &:focus, + &:active { + border: solid thin transparent !important; + box-shadow: none; + background-color: $gray-light; + } + &:hover { + border: solid thin $gray-light !important; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/imageInfo.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/imageInfo.css new file mode 100644 index 000000000..69030a582 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/imageInfo.css @@ -0,0 +1,39 @@ +.name { + display: flex; + align-items: center; + + & > span { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 60%; + } +} + +.imagePreview { + max-width: 200px; + max-height: 200px; +} + +.imageWrapper { + display: flex; + flex-flow: column; + align-items: center; + width: 40px; + text-align: center; + margin-right: 10px; + & > span { + height: 16px; + } + & .label { + font-size: 9px; + color: $gray-light; + } +} + +.popup { + background-color: #f5f5f5 !important; + &:before { + background-color: #f5f5f5 !important; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/index.ts new file mode 100644 index 000000000..4d3ba4df8 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallWithErrors/index.ts @@ -0,0 +1 @@ +export { default } from './CallWithErrors' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/CallsErrors4xx.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/CallsErrors4xx.tsx new file mode 100644 index 000000000..afaaeb37d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/CallsErrors4xx.tsx @@ -0,0 +1,49 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function CallsErrors4xx(props: Props) { + const { data, metric } = props; + return ( + + + + + + + + + { 
Array.isArray(metric.data.namesMap) && metric.data.namesMap.map((key, index) => ( + + ))} + + + + ); +} + +export default CallsErrors4xx; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/index.ts new file mode 100644 index 000000000..a21e4a950 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx/index.ts @@ -0,0 +1 @@ +export { default } from './CallsErrors4xx' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/CallsErrors5xx.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/CallsErrors5xx.tsx new file mode 100644 index 000000000..cc87d5c26 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/CallsErrors5xx.tsx @@ -0,0 +1,49 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function CallsErrors5xx(props: Props) { + const { data, metric } = props; + return ( + + + + + + + + + { Array.isArray(metric.data.namesMap) && metric.data.namesMap.map((key, index) => ( + + ))} + + + + ); +} + +export default CallsErrors5xx; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/index.ts new file mode 100644 index 000000000..661204c0d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx/index.ts @@ -0,0 +1 @@ +export { default } from './CallsErrors5xx' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/Crashes.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/Crashes.tsx new file mode 100644 index 000000000..0fa472db9 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/Crashes.tsx @@ -0,0 +1,55 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function Crashes(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + return ( + + + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Number of Crashes" }} + /> + + + + + + ); +} + +export default Crashes; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/index.ts new file mode 100644 index 000000000..ba5ce0764 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/Crashes/index.ts @@ -0,0 +1 @@ +export { default } from './Crashes' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/DomBuildingTime.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/DomBuildingTime.tsx new file mode 100644 index 
000000000..60402a309 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/DomBuildingTime.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { withRequest } from 'HOCs' +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; +import WidgetAutoComplete from 'Shared/WidgetAutoComplete'; +import { toUnderscore } from 'App/utils'; + +const WIDGET_KEY = 'pagesDomBuildtime'; + +interface Props { + data: any + optionsLoading: any + fetchOptions: any + options: any + metric?: any +} +function DomBuildingTime(props: Props) { + const { data, optionsLoading, metric } = props; + const gradientDef = Styles.gradientDef(); + + const onSelect = (params) => { + // const _params = { density: 70 } + // TODO reload the data with new params; + // this.props.fetchWidget(WIDGET_KEY, dashbaordStore.period, props.platform, { ..._params, url: params.value }) + } + + return ( + + <> +
+ {/* */} + +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "DOM Build Time (ms)" }} + /> + + + + + +
+ ); +} + +export default withRequest({ + dataName: "options", + initialData: [], + dataWrapper: data => data, + loadingName: 'optionsLoading', + requestName: "fetchOptions", + endpoint: '/dashboard/' + toUnderscore(WIDGET_KEY) + '/search', + method: 'GET' +})(DomBuildingTime) \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/index.ts new file mode 100644 index 000000000..a3191aaf7 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime/index.ts @@ -0,0 +1 @@ +export { default } from './DomBuildingTime' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/ErrorsByOrigin.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/ErrorsByOrigin.tsx new file mode 100644 index 000000000..d7aefebd0 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/ErrorsByOrigin.tsx @@ -0,0 +1,51 @@ +//@ts-nocheck +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function ErrorsByOrigin(props: Props) { + const { data, metric } = props; + return ( + + + + + + + + + 1st Party} dataKey="firstParty" stackId="a" fill={Styles.colors[0]} /> + 3rd Party} dataKey="thirdParty" stackId="a" fill={Styles.colors[2]} /> + {/* 1st Party} dataKey="firstParty" stackId="a" fill={Styles.colors[0]} /> + 3rd Party} dataKey="thirdParty" stackId="a" fill={Styles.colors[2]} /> */} + + + + ); +} + +export default ErrorsByOrigin; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/index.ts new file mode 100644 index 000000000..18a8b9ec3 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin/index.ts @@ -0,0 +1 @@ +export { default } from './ErrorsByOrigin' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/ErrorsByType.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/ErrorsByType.tsx new file mode 100644 index 000000000..23a6fda45 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/ErrorsByType.tsx @@ -0,0 +1,50 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function ErrorsByType(props: Props) { + const { data, metric } = props; + return ( + + + + + + + + + + + + + + + + ); +} + +export default ErrorsByType; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/index.ts new file mode 100644 index 000000000..f889ccec7 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType/index.ts @@ -0,0 +1 @@ +export { default } from './ErrorsByType' \ No newline at 
end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/ErrorsPerDomain.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/ErrorsPerDomain.tsx new file mode 100644 index 000000000..fab8ced65 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/ErrorsPerDomain.tsx @@ -0,0 +1,37 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { numberWithCommas } from 'App/utils'; +import Bar from 'App/components/Dashboard/Widgets/ErrorsPerDomain/Bar'; + +interface Props { + data: any + metric?: any +} +function ErrorsPerDomain(props: Props) { + const { data, metric } = props; + // const firstAvg = 10; + const firstAvg = metric.data.chart[0] && metric.data.chart[0].errorsCount; + return ( + +
+ {metric.data.chart.map((item, i) => + + )} +
+
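// The Bar rows above render each domain relative to the first entry's errorsCount;
// a sketch of the width math, assuming the API returns the chart sorted descending:
const sampleChart = [{ domain: 'a.com', errorsCount: 400 }, { domain: 'b.com', errorsCount: 100 }];
const baseline = sampleChart[0].errorsCount;
const barWidths = sampleChart.map(item => (item.errorsCount * 100) / baseline);
// barWidths -> [100, 25], i.e. percent of the widest bar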
+ ); +} + +export default ErrorsPerDomain; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/index.ts new file mode 100644 index 000000000..d08e3867b --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain/index.ts @@ -0,0 +1 @@ +export { default } from './ErrorsPerDomain' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/FPS.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/FPS.tsx new file mode 100644 index 000000000..a6311f7cc --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/FPS.tsx @@ -0,0 +1,60 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function FPS(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + <> +
+ +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Frames Per Second" }} + /> + + + + + +
+ ); +} + +export default FPS; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/index.ts new file mode 100644 index 000000000..85a43ba5e --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/FPS/index.ts @@ -0,0 +1 @@ +export { default } from './FPS' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/MemoryConsumption.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/MemoryConsumption.tsx new file mode 100644 index 000000000..3ccb890ed --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/MemoryConsumption.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function MemoryConsumption(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + <> +
+ +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "JS Heap Size (mb)" }} + /> + + + + + +
+ ); +} + +export default MemoryConsumption; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/index.ts new file mode 100644 index 000000000..7d426259c --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption/index.ts @@ -0,0 +1 @@ +export { default } from './MemoryConsumption' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/Chart.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/Chart.js new file mode 100644 index 000000000..2f406622d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/Chart.js @@ -0,0 +1,16 @@ +import { AreaChart, Area } from 'recharts'; +import { Styles } from '../../common'; + +const Chart = ({ data, compare }) => { + const colors = compare ? Styles.compareColors : Styles.colors; + + return ( + + + + ); +} + +Chart.displayName = 'Chart'; + +export default Chart; diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/CopyPath.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/CopyPath.js new file mode 100644 index 000000000..6b7e709e7 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/CopyPath.js @@ -0,0 +1,23 @@ +import React from 'react' +import copy from 'copy-to-clipboard' +import { useState } from 'react' + +const CopyPath = ({ data }) => { + const [copied, setCopied] = useState(false) + + const copyHandler = () => { + copy(data.url); + setCopied(true); + setTimeout(function() { + setCopied(false) + }, 500); + } + + return ( +
+ { copied ? 'Copied' : 'Copy Path'} +
+ ) +} + +export default CopyPath diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx new file mode 100644 index 000000000..0a3d5567e --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/MissingResources.tsx @@ -0,0 +1,63 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, Table } from '../../common'; +import { List } from 'immutable'; + +import Chart from './Chart'; +import ResourceInfo from './ResourceInfo'; +import CopyPath from './CopyPath'; + +const cols = [ + { + key: 'resource', + title: 'Resource', + Component: ResourceInfo, + width: '40%', + }, + { + key: 'sessions', + title: 'Sessions', + toText: count => `${ count > 1000 ? Math.trunc(count / 1000) : count }${ count > 1000 ? 'k' : '' }`, + width: '20%', + }, + { + key: 'trend', + title: 'Trend', + Component: Chart, + width: '20%', + }, + { + key: 'copy-path', + title: '', + Component: CopyPath, + cellClass: 'invisible group-hover:visible text-right', + width: '20%', + } +]; + +interface Props { + data: any + metric?: any +} +function MissingResources(props: Props) { + const { data, metric } = props; + + return ( + +
+
+ + + ); +} + +export default MissingResources; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/ResourceInfo.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/ResourceInfo.js new file mode 100644 index 000000000..d4b1ed9b8 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/ResourceInfo.js @@ -0,0 +1,18 @@ +import { diffFromNowString } from 'App/date'; +import { TextEllipsis } from 'UI'; + +import styles from './resourceInfo.css'; + +export default class ResourceInfo extends React.PureComponent { + render() { + const { data } = this.props; + return ( +
+ +
+ { data.endedAt && data.startedAt && `${ diffFromNowString(data.endedAt) } ago - ${ diffFromNowString(data.startedAt) } old` } +
+
+ ); + } +} diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/index.ts new file mode 100644 index 000000000..db419a09a --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/index.ts @@ -0,0 +1 @@ +export { default } from './MissingResources' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/resourceInfo.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/resourceInfo.css new file mode 100644 index 000000000..d73d23530 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/MissingResources/resourceInfo.css @@ -0,0 +1,10 @@ +.name { + letter-spacing: -.04em; + font-size: .9rem; + cursor: pointer; +} + +.timings { + color: $gray-medium; + font-size: 12px; +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/ResourceLoadedVsResponseEnd.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/ResourceLoadedVsResponseEnd.tsx new file mode 100644 index 000000000..0423a0007 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/ResourceLoadedVsResponseEnd.tsx @@ -0,0 +1,70 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + ComposedChart, Bar, CartesianGrid, Line, Legend, ResponsiveContainer, + XAxis, YAxis, Tooltip +} from 'recharts'; + +interface Props { + data: any + metric?: any +} +function ResourceLoadedVsResponseEnd(props: Props) { + const { data, metric } = props; + + return ( + + + + + + Styles.tickFormatter(val, 'ms')} + /> + Styles.tickFormatter(val, 'ms')} + /> + + + + + + + + + ); +} + +export default ResourceLoadedVsResponseEnd; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/index.ts new file mode 100644 index 000000000..072096a6f --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd/index.ts @@ -0,0 +1 @@ +export { default } from './ResourceLoadedVsResponseEnd' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/ResourceLoadedVsVisuallyComplete.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/ResourceLoadedVsVisuallyComplete.tsx new file mode 100644 index 000000000..7c76489a6 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/ResourceLoadedVsVisuallyComplete.tsx @@ -0,0 +1,72 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + ComposedChart, Bar, CartesianGrid, Line, Legend, ResponsiveContainer, + XAxis, YAxis, Tooltip +} from 'recharts'; + +interface Props { + data: any + metric?: any +} +function ResourceLoadedVsVisuallyComplete(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + + + + + Styles.tickFormatter(val, 'ms')} + /> + Styles.tickFormatter(val)} + /> + + + + + + + + + + ); +} + +export default 
ResourceLoadedVsVisuallyComplete; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/index.ts new file mode 100644 index 000000000..af77c13fa --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete/index.ts @@ -0,0 +1 @@ +export { default } from './ResourceLoadedVsVisuallyComplete' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/ResourceLoadingTime.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/ResourceLoadingTime.tsx new file mode 100644 index 000000000..905c347f2 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/ResourceLoadingTime.tsx @@ -0,0 +1,122 @@ +import React from 'react'; +import { NoContent, DropdownPlain } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { withRequest } from 'HOCs' +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; +import WidgetAutoComplete from 'Shared/WidgetAutoComplete'; +import { toUnderscore } from 'App/utils'; + +const WIDGET_KEY = 'resourcesLoadingTime'; +export const RESOURCE_OPTIONS = [ + { text: 'All', value: 'all', }, + { text: 'JS', value: "SCRIPT", }, + { text: 'CSS', value: "STYLESHEET", }, + { text: 'Fetch', value: "REQUEST", }, + { text: 'Image', value: "IMG", }, + { text: 'Media', value: "MEDIA", }, + { text: 'Other', value: "OTHER", }, +]; + +interface Props { + data: any + optionsLoading: any + fetchOptions: any + options: any + metric?: any +} +function ResourceLoadingTime(props: Props) { + const { data, optionsLoading, metric } = props; + const gradientDef = Styles.gradientDef(); + const [autoCompleteSelected, setAutoCompleteSelected] = React.useState(''); + const [type, setType] = React.useState(''); + + const onSelect = (params) => { + // const _params = { density: 70 } + setAutoCompleteSelected(params.value); + // TODO reload the data with new params; + // this.props.fetchWidget(WIDGET_KEY, dashboardStore.period, props.platform, { ..._params, url: params.value }) + } + + const writeOption = (e, { name, value }) => { + // this.setState({ [name]: value }) + setType(value); + const _params = { density: 70 } // TODO reload the data with new params; + // this.props.fetchWidget(WIDGET_KEY, this.props.period, this.props.platform, { ..._params, [ name ]: value === 'all' ? null : value }) + } + + return ( + + <> +
+ {/* + */} + +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Resource Fetch Time (ms)" }} + /> + + + + + +
+ ); +} + +export default withRequest({ + dataName: "options", + initialData: [], + dataWrapper: data => data, + loadingName: 'optionsLoading', + requestName: "fetchOptions", + endpoint: '/dashboard/' + toUnderscore(WIDGET_KEY) + '/search', + method: 'GET' +})(ResourceLoadingTime) \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/index.ts new file mode 100644 index 000000000..1c9fa51c8 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime/index.ts @@ -0,0 +1 @@ +export { default } from './ResourceLoadingTime' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/ResponseTime.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/ResponseTime.tsx new file mode 100644 index 000000000..e1af351ee --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/ResponseTime.tsx @@ -0,0 +1,91 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { withRequest } from 'HOCs' +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; +import WidgetAutoComplete from 'Shared/WidgetAutoComplete'; +import { toUnderscore } from 'App/utils'; + +const WIDGET_KEY = 'pagesResponseTime'; + +interface Props { + data: any + optionsLoading: any + fetchOptions: any + options: any + metric?: any +} +function ResponseTime(props: Props) { + const { data, optionsLoading, metric } = props; + const gradientDef = Styles.gradientDef(); + + + const onSelect = (params) => { + // const _params = { density: 70 } + // TODO reload the data with new params; + // this.props.fetchWidget(WIDGET_KEY, dashbaordStore.period, props.platform, { ..._params, url: params.value }) + } + + return ( + + <> +
+ {/* */} + +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Page Response Time (ms)" }} + /> + + + + + +
+ ); +} + +export default withRequest({ + dataName: "options", + initialData: [], + dataWrapper: data => data, + loadingName: 'optionsLoading', + requestName: "fetchOptions", + endpoint: '/dashboard/' + toUnderscore(WIDGET_KEY) + '/search', + method: 'GET' +})(ResponseTime) \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/index.ts new file mode 100644 index 000000000..95effcb83 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime/index.ts @@ -0,0 +1 @@ +export { default } from './ResponseTime' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/ResponseTimeDistribution.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/ResponseTimeDistribution.tsx new file mode 100644 index 000000000..2f79230e5 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/ResponseTimeDistribution.tsx @@ -0,0 +1,128 @@ +import React from 'react'; +import { Loader, NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { + ComposedChart, Bar, BarChart, CartesianGrid, ResponsiveContainer, + XAxis, YAxis, ReferenceLine, Tooltip, Legend +} from 'recharts'; + + +const PercentileLine = props => { + const { + viewBox: { x, y }, + xoffset, + yheight, + height, + label + } = props; + return ( + + + + {label} + + + ); +}; + +interface Props { + data: any + metric?: any +} +function ResponseTimeDistribution(props: Props) { + const { data, metric } = props; + const colors = Styles.colors; + + return ( + +
+ +
+
+ + + + + + + 'Page Response Time: ' + val} /> + { metric.data.percentiles.map((item, i) => ( + + } + // allowDecimals={false} + x={item.responseTime} + strokeWidth={0} + strokeOpacity={1} + /> + ))} + + + + + + + + + + + +
+
+ ); +} + +export default ResponseTimeDistribution; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/index.ts new file mode 100644 index 000000000..163efa255 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/ResponseTimeDistribution/index.ts @@ -0,0 +1 @@ +export { default } from './ResponseTimeDistribution' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/SessionsAffectedByJSErrors.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/SessionsAffectedByJSErrors.tsx new file mode 100644 index 000000000..0c077e747 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/SessionsAffectedByJSErrors.tsx @@ -0,0 +1,47 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function SessionsAffectedByJSErrors(props: Props) { + const { data, metric } = props; + return ( + + + + + + + + + + + + + ); +} + +export default SessionsAffectedByJSErrors; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/index.ts new file mode 100644 index 000000000..b160b1af1 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors/index.ts @@ -0,0 +1 @@ +export { default } from './SessionsAffectedByJSErrors' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/SessionsImpactedBySlowRequests.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/SessionsImpactedBySlowRequests.tsx new file mode 100644 index 000000000..70202ea56 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/SessionsImpactedBySlowRequests.tsx @@ -0,0 +1,55 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; + +interface Props { + data: any + metric?: any +} +function SessionsImpactedBySlowRequests(props: Props) { + const { data, metric } = props; + const gradientDef = Styles.gradientDef(); + + return ( + + + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Number of Sessions" }} + /> + + + + + + ); +} + +export default SessionsImpactedBySlowRequests; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/index.ts new file mode 100644 index 000000000..d950b82ae --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests/index.ts @@ -0,0 +1 @@ +export { default } from 
'./SessionsImpactedBySlowRequests' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.css new file mode 100644 index 000000000..dde6009e4 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.css @@ -0,0 +1,20 @@ +.bar { + height: 5px; + width: 100%; + border-radius: 3px; + display: flex; + align-items: center; + & div { + padding: 0 5px; + height: 20px; + color: #FFF; + } + & div:first-child { + border-top-left-radius: 3px; + border-bottom-left-radius: 3px; + } + & div:last-child { + border-top-right-radius: 3px; + border-bottom-right-radius: 3px; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.js new file mode 100644 index 000000000..b1204ee9e --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/Bar.js @@ -0,0 +1,34 @@ +import React from 'react' +import stl from './Bar.css' +// import { Styles } from '../common' +import { TextEllipsis } from 'UI'; + +const Bar = ({ className = '', versions = [], width = 0, avg, domain, colors }) => { + return ( +
+
+
+ {versions.map((v, i) => { + const w = (v.value * 100)/ avg; + return ( +
+ +
Version: {v.key}
+
Sessions: {v.value}
+
+ } /> +
+ ) + })} +
+
+ {`${avg}`} +
+
+
{domain}
+ + ) +} + +export default Bar \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/SessionsPerBrowser.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/SessionsPerBrowser.tsx new file mode 100644 index 000000000..ad8663390 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/SessionsPerBrowser.tsx @@ -0,0 +1,41 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import Bar from './Bar'; + +interface Props { + data: any + metric?: any +} +function SessionsPerBrowser(props: Props) { + const { data, metric } = props; + const firstAvg = metric.data.chart[0] && metric.data.chart[0].count; + + const getVersions = item => { + return Object.keys(item) + .filter(i => i !== 'browser' && i !== 'count') + .map(i => ({ key: 'v' +i, value: item[i]})) + } + return ( + +
+ {metric.data.chart.map((item, i) => + + )} +
+
+ ); +} + +export default SessionsPerBrowser; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/index.ts new file mode 100644 index 000000000..06f0656a1 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser/index.ts @@ -0,0 +1 @@ +export { default } from './SessionsPerBrowser' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/SlowestDomains.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/SlowestDomains.tsx new file mode 100644 index 000000000..2d74e2b39 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/SlowestDomains.tsx @@ -0,0 +1,36 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles } from '../../common'; +import { numberWithCommas } from 'App/utils'; +import Bar from 'App/components/Dashboard/Widgets/SlowestDomains/Bar'; + +interface Props { + data: any + metric?: any +} +function SlowestDomains(props: Props) { + const { data, metric } = props; + const firstAvg = metric.data.chart[0] && metric.data.chart[0].errorsCount; + return ( + +
+ {metric.data.chart.map((item, i) => + + )} +
+
+ ); +} + +export default SlowestDomains; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/index.ts new file mode 100644 index 000000000..311262347 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains/index.ts @@ -0,0 +1 @@ +export { default } from './SlowestDomains' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/Chart.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/Chart.js new file mode 100644 index 000000000..ab0a27a94 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/Chart.js @@ -0,0 +1,15 @@ +import { AreaChart, Area } from 'recharts'; +import { Styles } from '../../common'; + +const Chart = ({ data, compare }) => { + const colors = compare ? Styles.compareColors : Styles.colors; + return ( + + + + ); +} + +Chart.displayName = 'Chart'; + +export default Chart; diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/CopyPath.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/CopyPath.js new file mode 100644 index 000000000..6b7e709e7 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/CopyPath.js @@ -0,0 +1,23 @@ +import React from 'react' +import copy from 'copy-to-clipboard' +import { useState } from 'react' + +const CopyPath = ({ data }) => { + const [copied, setCopied] = useState(false) + + const copyHandler = () => { + copy(data.url); + setCopied(true); + setTimeout(function() { + setCopied(false) + }, 500); + } + + return ( +
+ { copied ? 'Copied' : 'Copy Path'} +
+ ) +} + +export default CopyPath diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ImageInfo.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ImageInfo.js new file mode 100644 index 000000000..fed6b71b6 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ImageInfo.js @@ -0,0 +1,27 @@ +import { Popup } from 'UI'; +import cn from 'classnames'; +import styles from './imageInfo.css'; + +const supportedTypes = ['png', 'jpg', 'jpeg', 'svg']; + +const ImageInfo = ({ data }) => { + const canPreview = supportedTypes.includes(data.type); + return ( +
+ +
{data.name}
+
+ } + disabled={!canPreview} + content={ One of the slowest images } + /> + + ) +}; + +ImageInfo.displayName = 'ImageInfo'; + +export default ImageInfo; diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ResourceType.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ResourceType.js new file mode 100644 index 000000000..9803a050f --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/ResourceType.js @@ -0,0 +1,12 @@ +import React from 'react' +import cn from 'classnames' + +const ResourceType = ({ data : { type = 'js' }, compare }) => { + return ( +
+ { type.toUpperCase() } +
+ ) +} + +export default ResourceType diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/SlowestResources.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/SlowestResources.tsx new file mode 100644 index 000000000..c4bbb1ed9 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/SlowestResources.tsx @@ -0,0 +1,81 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, Table } from '../../common'; +import { List } from 'immutable'; +import { numberWithCommas } from 'App/utils'; + +import Chart from './Chart'; +import ImageInfo from './ImageInfo'; +import ResourceType from './ResourceType'; +import CopyPath from './CopyPath'; + +export const RESOURCE_OPTIONS = [ + { text: 'All', value: 'ALL', }, + { text: 'CSS', value: 'STYLESHEET', }, + { text: 'JS', value: 'SCRIPT', }, +]; + +const cols = [ + { + key: 'type', + title: 'Type', + Component: ResourceType, + className: 'text-center justify-center', + cellClass: 'ml-2', + width: '8%', + }, + { + key: 'name', + title: 'File Name', + Component: ImageInfo, + cellClass: '-ml-2', + width: '40%', + }, + { + key: 'avg', + title: 'Load Time', + toText: avg => `${ avg ? numberWithCommas(Math.trunc(avg)) : 0} ms`, + className: 'justify-center', + width: '15%', + }, + { + key: 'trend', + title: 'Trend', + Component: Chart, + width: '15%', + }, + { + key: 'copy-path', + title: '', + Component: CopyPath, + cellClass: 'invisible group-hover:visible text-right', + width: '15%', + } +]; + +interface Props { + data: any + metric?: any +} +function SlowestResources(props: Props) { + const { data, metric } = props; + + return ( + +
+
+ + + ); +} + +export default SlowestResources; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/imageInfo.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/imageInfo.css new file mode 100644 index 000000000..1de36b529 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/imageInfo.css @@ -0,0 +1,52 @@ +.name { + display: flex; + align-items: center; + + & > span { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 60%; + } + + & .label { + max-width: 300px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } +} + +.hasPreview { + /* text-decoration: underline; */ + border-bottom: 1px dotted; + cursor: pointer; +} + +.imagePreview { + max-width: 200px; + max-height: 200px; +} + +.imageWrapper { + display: flex; + flex-flow: column; + align-items: center; + width: 40px; + text-align: center; + margin-right: 10px; + & > span { + height: 16px; + } + & .label { + font-size: 9px; + color: $gray-light; + } +} + +.popup { + background-color: #f5f5f5 !important; + &:before { + background-color: #f5f5f5 !important; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/index.ts new file mode 100644 index 000000000..ca907e9f0 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SlowestResources/index.ts @@ -0,0 +1 @@ +export { default } from './SlowestResources' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/Scale.js b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/Scale.js new file mode 100644 index 000000000..2171c432e --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/Scale.js @@ -0,0 +1,24 @@ +import React from 'react' +import { Styles } from '../../common'; +import cn from 'classnames'; +import stl from './scale.css'; + +function Scale({ colors }) { + const lastIndex = (Styles.colors.length - 1) + return ( +
+ {colors.map((c, i) => ( +
+ { i === 0 &&
Slow
} + { i === lastIndex &&
Fast
} +
+ ))} +
+ ) +} + +export default Scale diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.css new file mode 100644 index 000000000..a42f4af12 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.css @@ -0,0 +1,55 @@ +.maps { + height: auto; + width: 110%; + stroke: $gray-medium; + stroke-width: 1; + stroke-linecap: round; + stroke-linejoin: round; + margin-top: -20px; +} + +.location { + fill: $gray-light !important; + cursor: pointer; + stroke: #fff; + + &:focus, + &:hover { + fill: $teal !important; + outline: 0; + } +} + +.heat_index0 { + fill:$gray-light !important; +} + +.heat_index5 { + fill: #3EAAAF !important; +} + +.heat_index4 { + fill:#5FBABF !important; +} + +.heat_index3 { + fill: #7BCBCF !important; +} + +.heat_index2 { + fill: #96DCDF !important; +} + +.heat_index1 { + fill: #ADDCDF !important; +} + +.tooltip { + position: fixed; + padding: 5px; + border: 1px solid $gray-light; + border-radius: 3px; + background-color: white; + font-size: 12px; + line-height: 1.2; +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.tsx new file mode 100644 index 000000000..a538ed36a --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/SpeedIndexByLocation.tsx @@ -0,0 +1,107 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import Scale from './Scale'; +import { observer } from 'mobx-react-lite'; +import { numberWithCommas, positionOfTheNumber } from 'App/utils'; +import WorldMap from "@svg-maps/world"; +import { SVGMap } from "react-svg-map"; +import stl from './SpeedIndexByLocation.css'; +import cn from 'classnames'; + +interface Props { + metric?: any +} +function SpeedIndexByLocation(props: Props) { + const { metric } = props; + const wrapper: any = React.useRef(null); + let map: any = null; + const [tooltipStyle, setTooltipStyle] = React.useState({ display: 'none' }); + const [pointedLocation, setPointedLocation] = React.useState(null); + const dataMap = React.useMemo(() => { + const data = {}; + const max = metric.data.chart.reduce((acc, item) => Math.max(acc, item.avg), 0); + const min = metric.data.chart.reduce((acc, item) => Math.min(acc, item.avg), 0); + metric.data.chart.forEach((item: any) => { + item.perNumber = positionOfTheNumber(min, max, item.avg, 5); + data[item.userCountry.toLowerCase()] = item; + }); + return data; + }, []) + + const getLocationClassName = (location, index) => { + const i = (dataMap[location.id] ? dataMap[location.id].perNumber : 0); + const cls = stl["heat_index" + i]; + return cn(stl.location, cls); + } + + const getLocationName = (event) => { + if (!event) return null + const id = event.target.attributes.id.value; + const name = event.target.attributes.name.value; + const percentage = dataMap[id] ? 
dataMap[id].perNumber : 0; + return { name, id, percentage } + } + + const handleLocationMouseOver = (event) => { + const pointedLocation = getLocationName(event); + setPointedLocation(pointedLocation); + } + + const handleLocationMouseOut = () => { + setTooltipStyle({ display: 'none' }); + setPointedLocation(null); + } + + const handleLocationMouseMove = (event) => { + const tooltipStyle = { + display: 'block', + top: event.clientY + 10, + left: event.clientX - 100 + }; + setTooltipStyle(tooltipStyle); + } + + return ( + +
+ +
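// The heat_index0..heat_index5 classes above come from positionOfTheNumber(min, max, avg, 5);
// a plausible linear-bucketing sketch (App/utils' actual implementation is not in this diff):
const positionOfTheNumberSketch = (min: number, max: number, value: number, steps: number): number =>
  max === min ? 0 : Math.round(((value - min) / (max - min)) * steps);
// positionOfTheNumberSketch(0, 1000, 740, 5) === 4 -> class "heat_index4"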
+ +
+
+ +
+
+ {pointedLocation && ( + <> +
{pointedLocation.name}
+
Avg: {dataMap[pointedLocation.id] ? numberWithCommas(parseInt(dataMap[pointedLocation.id].avg)) : 0}
+ + )} +
+
+ ); +} + +export default observer(SpeedIndexByLocation); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/index.ts new file mode 100644 index 000000000..1cbdfe2f8 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/index.ts @@ -0,0 +1 @@ +export { default } from './SpeedIndexByLocation' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/scale.css b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/scale.css new file mode 100644 index 000000000..5aa34f966 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation/scale.css @@ -0,0 +1,11 @@ +.bars { + & div:first-child { + border-top-left-radius: 3px; + border-top-right-radius: 3px; + } + + & div:last-child { + border-bottom-left-radius: 3px; + border-bottom-right-radius: 3px; + } +} \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/TimeToRender.tsx b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/TimeToRender.tsx new file mode 100644 index 000000000..45167c9b1 --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/TimeToRender.tsx @@ -0,0 +1,91 @@ +import React from 'react'; +import { NoContent } from 'UI'; +import { Styles, AvgLabel } from '../../common'; +import { withRequest } from 'HOCs' +import { + AreaChart, Area, + BarChart, Bar, CartesianGrid, Tooltip, + LineChart, Line, Legend, ResponsiveContainer, + XAxis, YAxis + } from 'recharts'; +import WidgetAutoComplete from 'Shared/WidgetAutoComplete'; +import { toUnderscore } from 'App/utils'; + +const WIDGET_KEY = 'timeToRender'; + +interface Props { + data: any + optionsLoading: any + fetchOptions: any + options: any + metric?: any +} +function TimeToRender(props: Props) { + const { data, optionsLoading, metric } = props; + const gradientDef = Styles.gradientDef(); + + + const onSelect = (params) => { + // const _params = { density: 70 } + // TODO reload the data with new params; + // this.props.fetchWidget(WIDGET_KEY, dashbaordStore.period, props.platform, { ..._params, url: params.value }) + } + + return ( + + <> +
+ {/* */} + +
+ + + {gradientDef} + + + Styles.tickFormatter(val)} + label={{ ...Styles.axisLabelLeft, value: "Time to Render (ms)" }} + /> + + + + + +
+ ); +} + +export default withRequest({ + dataName: "options", + initialData: [], + dataWrapper: data => data, + loadingName: 'optionsLoading', + requestName: "fetchOptions", + endpoint: '/dashboard/' + toUnderscore(WIDGET_KEY) + '/search', + method: 'GET' +})(TimeToRender) \ No newline at end of file diff --git a/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/index.ts b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/index.ts new file mode 100644 index 000000000..0e806bf6d --- /dev/null +++ b/frontend/app/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender/index.ts @@ -0,0 +1 @@ +export { default } from './TimeToRender' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashbaordListModal/DashbaordListModal.tsx b/frontend/app/components/Dashboard/components/DashbaordListModal/DashbaordListModal.tsx new file mode 100644 index 000000000..d91d058b0 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashbaordListModal/DashbaordListModal.tsx @@ -0,0 +1,50 @@ +import React from 'react'; +import { useStore } from 'App/mstore'; +import { SideMenuitem, SideMenuHeader, Icon, Button } from 'UI'; +import { withSiteId, dashboardSelected, metrics } from 'App/routes'; +import { withRouter } from 'react-router-dom'; +import { useModal } from 'App/components/Modal'; + +interface Props { + siteId: string + history: any +} +function DashbaordListModal(props: Props) { + const { dashboardStore } = useStore(); + const { hideModal } = useModal(); + const dashboards = dashboardStore.dashboards; + const activeDashboardId = dashboardStore.selectedDashboard?.dashboardId; + + const onItemClick = (dashboard) => { + dashboardStore.selectDashboardById(dashboard.dashboardId); + const path = withSiteId(dashboardSelected(dashboard.dashboardId), parseInt(props.siteId)); + props.history.push(path); + hideModal(); + }; + return ( +
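Note on the `withRequest` wrapper around TimeToRender above: the HOC itself lives in 'HOCs' and is not part of this diff. Judging from the configuration object, it presumably injects props along these lines (an assumed contract, not the actual implementation):

    // Assumed props injected by withRequest({ dataName: "options", ... }):
    interface WithRequestInjectedProps {
      options: any[];           // dataWrapper(response), seeded with initialData
      optionsLoading: boolean;  // keyed by loadingName, true while the GET is in flight
      fetchOptions: (params?: Record<string, any>) => Promise<any>; // keyed by requestName, hits the endpoint
    }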
+
Dashboards
+
+ {dashboards.map((item: any) => ( +
+ onItemClick(item)} // TODO add click handler + leading = {( +
+ {item.isPublic &&
} + {item.isPinned &&
} +
+ )} + /> +
+ ))} +
+
+ ); +} + +export default withRouter(DashbaordListModal); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashbaordListModal/index.ts b/frontend/app/components/Dashboard/components/DashbaordListModal/index.ts new file mode 100644 index 000000000..2948a8225 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashbaordListModal/index.ts @@ -0,0 +1 @@ +export { default } from './DashbaordListModal' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardEditModal/DashboardEditModal.tsx b/frontend/app/components/Dashboard/components/DashboardEditModal/DashboardEditModal.tsx new file mode 100644 index 000000000..e6eae9e4e --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardEditModal/DashboardEditModal.tsx @@ -0,0 +1,86 @@ +import { useObserver } from 'mobx-react-lite'; +import React from 'react'; +import { Button, Modal, Form, Icon, Checkbox } from 'UI'; +import { useStore } from 'App/mstore' + +interface Props { + show: boolean; + // dashboard: any; + closeHandler?: () => void; +} +function DashboardEditModal(props: Props) { + const { show, closeHandler } = props; + const { dashboardStore } = useStore(); + const dashboard = useObserver(() => dashboardStore.dashboardInstance); + + const onSave = () => { + dashboardStore.save(dashboard).then(closeHandler); + } + + const write = ({ target: { value, name } }) => dashboard.update({ [ name ]: value }) + const writeOption = (e, { checked, name }) => { + dashboard.update({ [name]: checked }); + } + + return useObserver(() => ( + + +
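Note: DashbaordListModal above (and DashboardSideMenu further down) builds navigation paths with `withSiteId(dashboardSelected(id), siteId)`. App/routes is not included in this diff; the assumed behavior is plain path prefixing:

    // Hypothetical sketch of the App/routes helpers used here:
    const dashboardSelected = (dashboardId: number | string) => `/dashboard/${dashboardId}`;
    const withSiteId = (route: string, siteId: number | string) => `/${siteId}${route}`;
    // e.g. withSiteId(dashboardSelected(3), 1) === '/1/dashboard/3'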
{ 'Edit Dashboard' }
+ +
+ + +
+ + + + + + +
+ +
dashboard.update({ 'isPublic': !dashboard.isPublic }) }> + + Team can see and edit the dashboard. +
+
+
+ +
+ +
+ + +
+
+
+ )); +} + +export default DashboardEditModal; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardEditModal/index.ts b/frontend/app/components/Dashboard/components/DashboardEditModal/index.ts new file mode 100644 index 000000000..c7f4d7b17 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardEditModal/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardEditModal' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardForm/DashboardForm.tsx b/frontend/app/components/Dashboard/components/DashboardForm/DashboardForm.tsx new file mode 100644 index 000000000..9255663e9 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardForm/DashboardForm.tsx @@ -0,0 +1,60 @@ +import { useObserver } from 'mobx-react-lite'; +import React from 'react'; +import { Input } from 'UI'; +import { useDashboardStore } from '../../store/store'; +import cn from 'classnames'; +import { useStore } from 'App/mstore'; + +interface Props { +} + +function DashboardForm(props: Props) { + const { dashboardStore } = useStore(); + const dashboard = dashboardStore.dashboardInstance; + + const write = ({ target: { value, name } }) => dashboard.update({ [ name ]: value }) + const writeRadio = ({ target: { value, name } }) => { + dashboard.update({ [name]: value === 'team' }); + } + + return useObserver(() => ( +
+
+ + +
+ +
+ + +
+ + + +
+
+
+ )); +} + +export default DashboardForm; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardForm/index.ts b/frontend/app/components/Dashboard/components/DashboardForm/index.ts new file mode 100644 index 000000000..01c5b0072 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardForm/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardForm'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardMetricSelection/DashboardMetricSelection.tsx b/frontend/app/components/Dashboard/components/DashboardMetricSelection/DashboardMetricSelection.tsx new file mode 100644 index 000000000..400654df2 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardMetricSelection/DashboardMetricSelection.tsx @@ -0,0 +1,117 @@ +import React, { useEffect } from 'react'; +import WidgetWrapper from '../WidgetWrapper'; +import { useObserver } from 'mobx-react-lite'; +import cn from 'classnames'; +import { useStore } from 'App/mstore'; + +function WidgetCategoryItem({ category, isSelected, onClick, selectedWidgetIds }) { + const selectedCategoryWidgetsCount = useObserver(() => { + return category.widgets.filter(widget => selectedWidgetIds.includes(widget.metricId)).length; + }); + return ( +
onClick(category)} + > +
{category.name}
+
{category.description}
+ {selectedCategoryWidgetsCount > 0 && ( +
+ {`Selected ${selectedCategoryWidgetsCount} of ${category.widgets.length}`} +
+ )} +
+ ); +} + +function DashboardMetricSelection(props) { + const { dashboardStore } = useStore(); + let widgetCategories: any[] = useObserver(() => dashboardStore.widgetCategories); + const [activeCategory, setActiveCategory] = React.useState(); + const [selectAllCheck, setSelectAllCheck] = React.useState(false); + const selectedWidgetIds = useObserver(() => dashboardStore.selectedWidgets.map((widget: any) => widget.metricId)); + + useEffect(() => { + dashboardStore?.fetchTemplates().then(templates => { + setActiveCategory(dashboardStore.widgetCategories[0]); + }); + }, []); + + const handleWidgetCategoryClick = (category: any) => { + setActiveCategory(category); + setSelectAllCheck(false); + }; + + const toggleAllWidgets = ({ target: { checked }}) => { + // dashboardStore.toggleAllSelectedWidgets(checked); + setSelectAllCheck(checked); + if (checked) { + dashboardStore.selectWidgetsByCategory(activeCategory.name); + } else { + dashboardStore.removeSelectedWidgetByCategory(activeCategory); + } + } + + return useObserver(() => ( +
+
+
+
Categories
+
+ +
+ {activeCategory && ( + <> +
+

{activeCategory.name}

+ {activeCategory.widgets.length} +
+ +
+ Past 7 days' data + +
+ + )} +
+
+
+
+
+ {activeCategory && widgetCategories.map((category, index) => + + )} +
+
+
+
+ {activeCategory && activeCategory.widgets.map((widget: any) => ( + dashboardStore.toggleWidgetSelection(widget)} + /> + ))} +
+
+
+
+ )); +} + +export default DashboardMetricSelection; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardMetricSelection/index.ts b/frontend/app/components/Dashboard/components/DashboardMetricSelection/index.ts new file mode 100644 index 000000000..4436d6bfc --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardMetricSelection/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardMetricSelection'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx b/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx new file mode 100644 index 000000000..575cb7de8 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx @@ -0,0 +1,76 @@ +import React from 'react'; +import { useObserver } from 'mobx-react-lite'; +import DashboardMetricSelection from '../DashboardMetricSelection'; +import DashboardForm from '../DashboardForm'; +import { Button } from 'UI'; +import { withRouter } from 'react-router-dom'; +import { useStore } from 'App/mstore'; +import { useModal } from 'App/components/Modal'; +import { dashboardMetricCreate, withSiteId } from 'App/routes'; + +interface Props { + history: any + siteId?: string + dashboardId?: string +} +function DashboardModal(props) { + const { history, siteId, dashboardId } = props; + const { dashboardStore } = useStore(); + const selectedWidgetsCount = useObserver(() => dashboardStore.selectedWidgets.length); + const { hideModal } = useModal(); + const dashboard = useObserver(() => dashboardStore.dashboardInstance); + const loading = useObserver(() => dashboardStore.isSaving); + + const onSave = () => { + dashboardStore.save(dashboard).then(hideModal).then(() => { + if (dashboard.exists()) { + dashboardStore.fetch(dashboard.dashboardId) + } + }) + } + + const handleCreateNew = () => { + const path = withSiteId(dashboardMetricCreate(dashboardId), siteId); + history.push(path); + hideModal(); + } + + return useObserver(() => ( +
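Note on `toggleAllWidgets` in DashboardMetricSelection above: it calls two store methods that are not part of this diff, passing `activeCategory.name` to one and the whole `activeCategory` object to the other. A hedged sketch consistent with that calling convention:

    // Assumed dashboardStore methods (sketch, not the actual store code):
    class DashboardStoreSketch {
      widgetCategories: any[] = [];
      selectedWidgets: any[] = [];
      selectWidgetsByCategory(name: string) {
        const category = this.widgetCategories.find((c) => c.name === name);
        this.selectedWidgets = category ? [...category.widgets] : [];
      }
      removeSelectedWidgetByCategory(category: any) {
        const ids = category.widgets.map((w: any) => w.metricId);
        this.selectedWidgets = this.selectedWidgets.filter((w) => !ids.includes(w.metricId));
      }
    }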
+
+
+

+ { dashboard.exists() ? "Add metric(s) to dashboard" : "Create Dashboard" } +

+
+
+ {dashboard.exists() && } +
+
+ { !dashboard.exists() && ( + <> + +

Create a new dashboard by choosing from a range of predefined metrics you care about. You can always add your custom metrics later.

+ + )} + + +
+ + {selectedWidgetsCount} Widgets +
+
+ )); +} + +export default withRouter(DashboardModal); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardModal/index.ts b/frontend/app/components/Dashboard/components/DashboardModal/index.ts new file mode 100644 index 000000000..7082b746b --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardModal/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardModal' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardRouter/DashboardRouter.tsx b/frontend/app/components/Dashboard/components/DashboardRouter/DashboardRouter.tsx new file mode 100644 index 000000000..6004202a2 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardRouter/DashboardRouter.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import { Switch, Route } from 'react-router'; +import { withRouter } from 'react-router-dom'; + +import { + metrics, + metricDetails, + dashboardSelected, + dashboardMetricCreate, + dashboardMetricDetails, + withSiteId, + dashboard, +} from 'App/routes'; +import DashboardView from '../DashboardView'; +import MetricsView from '../MetricsView'; +import WidgetView from '../WidgetView'; + +function DashboardViewSelected({ siteId, dashboardId}) { + return ( + + ) +} + +interface Props { + history: any + match: any +} +function DashboardRouter(props: Props) { + const { match: { params: { siteId, dashboardId, metricId } } } = props; + return ( +
+ + + + + + + + + + + + + + + + + + + + + + + + + +
+ ); +} + +export default withRouter(DashboardRouter); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardRouter/index.ts b/frontend/app/components/Dashboard/components/DashboardRouter/index.ts new file mode 100644 index 000000000..62c27a8fd --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardRouter/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardRouter'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardSelectionModal/DashboardSelectionModal.tsx b/frontend/app/components/Dashboard/components/DashboardSelectionModal/DashboardSelectionModal.tsx new file mode 100644 index 000000000..830730d7b --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardSelectionModal/DashboardSelectionModal.tsx @@ -0,0 +1,73 @@ +import { useObserver } from 'mobx-react-lite'; +import React from 'react'; +import { Button, Modal, Form, Icon } from 'UI'; +import { useStore } from 'App/mstore' +import DropdownPlain from 'Shared/DropdownPlain'; + +interface Props { + metricId: string, + show: boolean; + closeHandler?: () => void; +} +function DashboardSelectionModal(props: Props) { + const { show, metricId, closeHandler } = props; + const { dashboardStore } = useStore(); + const dashboardOptions = dashboardStore.dashboards.map((i: any) => ({ + key: i.id, + text: i.name, + value: i.dashboardId, + })); + const [selectedId, setSelectedId] = React.useState(dashboardOptions[0].value); + + const onSave = () => { + const dashboard = dashboardStore.getDashboard(selectedId) + if (dashboard) { + dashboardStore.addWidgetToDashboard(dashboard, [metricId]).then(closeHandler) + } + } + + return useObserver(() => ( + + +
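Note: the body of DashboardRouter's `Switch` above was lost when the markup was stripped from this diff view. From the imported route helpers and views, the table presumably looked roughly like this (a reconstruction under assumptions, not the verbatim source):

    <Switch>
      <Route exact path={withSiteId(dashboard(), siteId)} component={DashboardView} />
      <Route exact path={withSiteId(dashboardSelected(dashboardId), siteId)}
        render={() => <DashboardViewSelected siteId={siteId} dashboardId={dashboardId} />} />
      <Route exact path={withSiteId(metrics(), siteId)} component={MetricsView} />
      <Route exact path={[
        withSiteId(dashboardMetricCreate(dashboardId), siteId),
        withSiteId(dashboardMetricDetails(dashboardId, metricId), siteId),
        withSiteId(metricDetails(metricId), siteId),
      ]} component={WidgetView} />
    </Switch>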
{ 'Add to selected dashboard' }
+ +
+ + +
+
+ + + setSelectedId(value)} + /> + + +
+
+ +
+ + +
+
+
+ )); +} + +export default DashboardSelectionModal; \ No newline at end of file diff --git a/api/routers/app/__init__.py b/frontend/app/components/Dashboard/components/DashboardSelectionModal/index.ts similarity index 100% rename from api/routers/app/__init__.py rename to frontend/app/components/Dashboard/components/DashboardSelectionModal/index.ts diff --git a/frontend/app/components/Dashboard/components/DashboardSideMenu/DashboardSideMenu.tsx b/frontend/app/components/Dashboard/components/DashboardSideMenu/DashboardSideMenu.tsx new file mode 100644 index 000000000..9dd64733c --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardSideMenu/DashboardSideMenu.tsx @@ -0,0 +1,126 @@ +//@ts-nocheck +import { useObserver } from 'mobx-react-lite'; +import React from 'react'; +import { SideMenuitem, SideMenuHeader, Icon, Button } from 'UI'; +import { useStore } from 'App/mstore'; +import { withRouter } from 'react-router-dom'; +import { withSiteId, dashboardSelected, metrics } from 'App/routes'; +import { useModal } from 'App/components/Modal'; +import DashbaordListModal from '../DashbaordListModal'; +import DashboardModal from '../DashboardModal'; +import cn from 'classnames'; +import { Tooltip } from 'react-tippy'; +import { connect } from 'react-redux'; +import { setShowAlerts } from 'Duck/dashboard'; + +const SHOW_COUNT = 8; +interface Props { + siteId: string + history: any + setShowAlerts: (show: boolean) => void +} +function DashboardSideMenu(props: Props) { + const { history, siteId, setShowAlerts } = props; + const { hideModal, showModal } = useModal(); + const { dashboardStore } = useStore(); + const dashboardId = useObserver(() => dashboardStore.selectedDashboard?.dashboardId); + const dashboardsPicked = useObserver(() => dashboardStore.dashboards.slice(0, SHOW_COUNT)); + const remainingDashboardsCount = dashboardStore.dashboards.length - SHOW_COUNT; + const isMetric = history.location.pathname.includes('metrics'); + + const redirect = (path) => { + history.push(path); + } + + const onItemClick = (dashboard) => { + dashboardStore.selectDashboardById(dashboard.dashboardId); + const path = withSiteId(dashboardSelected(dashboard.dashboardId), parseInt(siteId)); + history.push(path); + }; + + const onAddDashboardClick = (e) => { + dashboardStore.initDashboard(); + showModal(, {}) + } + + const togglePinned = (dashboard) => { + dashboardStore.updatePinned(dashboard.dashboardId); + } + + return useObserver(() => ( +
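Note on DashboardSelectionModal above: `React.useState(dashboardOptions[0].value)` throws if the store holds no dashboards; the modal is only reachable when at least one exists, but a defensive initializer costs nothing:

    // Optional chaining guards the empty-list case:
    const [selectedId, setSelectedId] = React.useState(dashboardOptions[0]?.value);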
+ + {dashboardsPicked.sort((a: any, b: any) => a.isPinned === b.isPinned ? 0 : a.isPinned ? -1 : 1 ).map((item: any) => ( + onItemClick(item)} + className="group" + leading = {( +
+ {item.isPublic &&
} + {item.isPinned &&
} + {!item.isPinned && ( + +
togglePinned(item)} + > + +
+
+ )} +
+ )} + /> + ))} +
+ {remainingDashboardsCount > 0 && ( +
showModal(, {})} + > + {remainingDashboardsCount} More +
+ )} +
+
+
+ +
+
+
+ redirect(withSiteId(metrics(), siteId))} + /> +
+
+
+ setShowAlerts(true)} + /> +
+
+ )); +} + +export default connect(null, { setShowAlerts })(withRouter(DashboardSideMenu)); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardSideMenu/index.ts b/frontend/app/components/Dashboard/components/DashboardSideMenu/index.ts new file mode 100644 index 000000000..7d83f5bed --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardSideMenu/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardSideMenu'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx new file mode 100644 index 000000000..16803e34c --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx @@ -0,0 +1,129 @@ +import React, { useEffect } from 'react'; +import { observer, useObserver } from 'mobx-react-lite'; +import { useStore } from 'App/mstore'; +import { Button, PageTitle, Link, Loader, NoContent, ItemMenu } from 'UI'; +import { withSiteId, dashboardMetricCreate, dashboardSelected, dashboard } from 'App/routes'; +import withModal from 'App/components/Modal/withModal'; +import DashboardWidgetGrid from '../DashboardWidgetGrid'; +import { confirm } from 'UI/Confirmation'; +import { withRouter } from 'react-router-dom'; +import { useModal } from 'App/components/Modal'; +import DashboardModal from '../DashboardModal'; +import DashboardEditModal from '../DashboardEditModal'; +import DateRange from 'Shared/DateRange'; +import AlertFormModal from 'App/components/Alerts/AlertFormModal'; +import withPageTitle from 'HOCs/withPageTitle'; + +interface Props { + siteId: number; + history: any + match: any + dashboardId: any +} +function DashboardView(props: Props) { + const { siteId, dashboardId } = props; + const { dashboardStore } = useStore(); + const { hideModal, showModal } = useModal(); + const showAlertModal = useObserver(() => dashboardStore.showAlertModal); + const loading = useObserver(() => dashboardStore.fetchingDashboard); + const dashboards = useObserver(() => dashboardStore.dashboards); + const dashboard: any = useObserver(() => dashboardStore.selectedDashboard); + const period = useObserver(() => dashboardStore.period); + const [showEditModal, setShowEditModal] = React.useState(false); + + useEffect(() => { + if (!dashboard || !dashboard.dashboardId) return; + dashboardStore.fetch(dashboard.dashboardId) + }, [dashboard]); + + useEffect(() => { + if (dashboardId) return; + dashboardStore.selectDefaultDashboard(); + }, []); + + const onAddWidgets = () => { + dashboardStore.initDashboard(dashboard) + showModal(, {}) + } + + const onEdit = () => { + dashboardStore.initDashboard(dashboard) + setShowEditModal(true) + } + + const onDelete = async () => { + if (await confirm({ + header: 'Confirm', + confirmButton: 'Yes, delete', + confirmation: `Are you sure you want to permanently delete this Dashboard?` + })) { + dashboardStore.deleteDashboard(dashboard).then(() => { + dashboardStore.selectDefaultDashboard().then(({ dashboardId }) => { + props.history.push(withSiteId(dashboard(), siteId)); + }); + }); + } + } + + return useObserver(() => ( + + Create Dashboard + } + > +
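Note on the pinned-first ordering in DashboardSideMenu above: `dashboardsPicked` is a `slice()` copy, so the in-place sort does not mutate the store array. The inline ternary comparator is equivalent to this more direct form (sketch):

    // Pinned dashboards sort before unpinned ones.
    const byPinnedFirst = (a: any, b: any) => Number(b.isPinned) - Number(a.isPinned);
    dashboardsPicked.sort(byPinnedFirst);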
+ setShowEditModal(false)} + /> +
+
+ + +
+
+
+ {/* Time Range */} + dashboardStore.setPeriod(period)} + customRangeRight + direction="left" + /> +
+
+
+ +
+
+
+ + dashboardStore.updateKey('showAlertModal', false)} + /> +
+ + + )); +} + +export default withPageTitle('Dashboards - OpenReplay')(withRouter(withModal(DashboardView))); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardView/index.ts b/frontend/app/components/Dashboard/components/DashboardView/index.ts new file mode 100644 index 000000000..569832baa --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardView/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardView' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx new file mode 100644 index 000000000..7c5d82d69 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/DashboardWidgetGrid.tsx @@ -0,0 +1,50 @@ +import React from 'react'; +import { useStore } from 'App/mstore'; +import WidgetWrapper from '../WidgetWrapper'; +import { NoContent, Button, Loader } from 'UI'; +import { useObserver } from 'mobx-react-lite'; + +interface Props { + siteId: string, + dashboardId: string; + onEditHandler: () => void; +} +function DashboardWidgetGrid(props: Props) { + const { dashboardId, siteId } = props; + const { dashboardStore } = useStore(); + const loading = useObserver(() => dashboardStore.isLoading); + const dashboard: any = dashboardStore.selectedDashboard; + const list: any = useObserver(() => dashboard?.widgets); + + return useObserver(() => ( + +

Metrics help you visualize trends from sessions captured by OpenReplay

+ +
+ } + > +
+ {list && list.map((item, index) => ( + dashboard.swapWidgetPosition(dragIndex, hoverIndex)} + dashboardId={dashboardId} + siteId={siteId} + isWidget={true} + /> + ))} +
+
+
+ )); +} + +export default DashboardWidgetGrid; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/DashboardWidgetGrid/index.ts b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/index.ts new file mode 100644 index 000000000..410933285 --- /dev/null +++ b/frontend/app/components/Dashboard/components/DashboardWidgetGrid/index.ts @@ -0,0 +1 @@ +export { default } from './DashboardWidgetGrid'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/FilterSeries/FilterSeries.tsx b/frontend/app/components/Dashboard/components/FilterSeries/FilterSeries.tsx new file mode 100644 index 000000000..181a3c8ee --- /dev/null +++ b/frontend/app/components/Dashboard/components/FilterSeries/FilterSeries.tsx @@ -0,0 +1,112 @@ +import React, { useEffect, useState } from 'react'; +import FilterList from 'Shared/Filters/FilterList'; +import { + edit, + updateSeries, + addSeriesFilterFilter, + removeSeriesFilterFilter, + editSeriesFilterFilter, + editSeriesFilter, +} from 'Duck/customMetrics'; +import { connect } from 'react-redux'; +import { IconButton, Icon } from 'UI'; +import FilterSelection from 'Shared/Filters/FilterSelection'; +import SeriesName from './SeriesName'; +import cn from 'classnames'; +import { observer } from 'mobx-react-lite'; + +interface Props { + seriesIndex: number; + series: any; + edit: typeof edit; + updateSeries: typeof updateSeries; + onRemoveSeries: (seriesIndex) => void; + canDelete?: boolean; + addSeriesFilterFilter: typeof addSeriesFilterFilter; + editSeriesFilterFilter: typeof editSeriesFilterFilter; + editSeriesFilter: typeof editSeriesFilter; + removeSeriesFilterFilter: typeof removeSeriesFilterFilter; + hideHeader?: boolean; + emptyMessage?: any; + observeChanges?: () => void; +} + +function FilterSeries(props: Props) { + const { observeChanges = () => {}, canDelete, hideHeader = false, emptyMessage = 'Add user event or filter to define the series by clicking Add Step.' } = props; + const [expanded, setExpanded] = useState(true) + const { series, seriesIndex } = props; + + useEffect(observeChanges, [series.filter]); + + const onAddFilter = (filter) => { + series.filter.addFilter(filter) + } + + const onUpdateFilter = (filterIndex, filter) => { + series.filter.updateFilter(filterIndex, filter) + } + + const onChangeEventsOrder = (e, { name, value }) => { + series.filter.updateKey(name, value) + } + + const onRemoveFilter = (filterIndex) => { + series.filter.removeFilter(filterIndex) + } + + return ( +
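Note on DashboardWidgetGrid above: drag-and-drop reordering is delegated to `swapWidgetPosition` on the selected dashboard model, which is not part of this diff. A minimal sketch of the assumed swap on the observable widget list:

    // Assumed Dashboard model method (sketch, not the actual mstore code):
    class DashboardSketch {
      widgets: any[] = [];
      swapWidgetPosition(a: number, b: number) {
        const tmp = this.widgets[a];
        this.widgets[a] = this.widgets[b];
        this.widgets[b] = tmp;  // mobx picks up the index writes and re-renders the grid
      }
    }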
+
+
+ series.update('name', name) } /> +
+ +
+
+ +
+ +
setExpanded(!expanded)} className="ml-3"> + +
+
+
+ { expanded && ( + <> +
+ { series.filter.filters.length > 0 ? ( + + ): ( +
{emptyMessage}
+ )} +
+
+
+ + + +
+
+ + )} +
+ ); +} + +export default connect(null, { + edit, + updateSeries, + addSeriesFilterFilter, + editSeriesFilterFilter, + editSeriesFilter, + removeSeriesFilterFilter, +})(observer(FilterSeries)); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/SeriesName.tsx b/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/SeriesName.tsx new file mode 100644 index 000000000..5d25e9de9 --- /dev/null +++ b/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/SeriesName.tsx @@ -0,0 +1,57 @@ +import React, { useState, useRef, useEffect } from 'react'; +import { Icon } from 'UI'; + +interface Props { + name: string; + onUpdate: (name) => void; + seriesIndex?: number; +} +function SeriesName(props: Props) { + const { seriesIndex = 1 } = props; + const [editing, setEditing] = useState(false) + const [name, setName] = useState(props.name) + const ref = useRef(null) + + const write = ({ target: { value, name } }) => { + setName(value) + } + + const onBlur = () => { + setEditing(false) + props.onUpdate(name) + } + + useEffect(() => { + if (editing) { + ref.current.focus() + } + }, [editing]) + + useEffect(() => { + setName(props.name) + }, [props.name]) + + // const { name } = props; + return ( +
+ { editing ? ( + setEditing(true)} + /> + ) : ( +
{name.trim() === '' ? 'Series ' + (seriesIndex + 1) : name }
+ )} + +
setEditing(true)}>
+
+ ); +} + +export default SeriesName; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/index.ts b/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/index.ts new file mode 100644 index 000000000..90e63cdb6 --- /dev/null +++ b/frontend/app/components/Dashboard/components/FilterSeries/SeriesName/index.ts @@ -0,0 +1 @@ +export { default } from './SeriesName'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/FilterSeries/index.ts b/frontend/app/components/Dashboard/components/FilterSeries/index.ts new file mode 100644 index 000000000..5882e382a --- /dev/null +++ b/frontend/app/components/Dashboard/components/FilterSeries/index.ts @@ -0,0 +1 @@ +export { default } from './FilterSeries' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricListItem/MetricListItem.tsx b/frontend/app/components/Dashboard/components/MetricListItem/MetricListItem.tsx new file mode 100644 index 000000000..f9f5e6d96 --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricListItem/MetricListItem.tsx @@ -0,0 +1,47 @@ +import React from 'react'; +import { Icon, NoContent, Label, Link, Pagination } from 'UI'; +import { checkForRecent, formatDateTimeDefault, convertTimestampToUtcTimestamp } from 'App/date'; + +interface Props { + metric: any; +} + +function DashboardLink({ dashboards}) { + return ( + dashboards.map(dashboard => ( + +
+
·
+ {dashboard.name} +
+ + )) + ); +} + +function MetricListItem(props: Props) { + const { metric } = props; + return ( +
+
+ + {metric.name} + +
+
+
+ +
+
{metric.owner}
+
+
+ + {metric.isPublic ? 'Team' : 'Private'} +
+
+
{metric.lastModified && checkForRecent(metric.lastModified, 'LLL dd, yyyy, hh:mm a')}
+
+ ); +} + +export default MetricListItem; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricListItem/index.ts b/frontend/app/components/Dashboard/components/MetricListItem/index.ts new file mode 100644 index 000000000..b4c506a23 --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricListItem/index.ts @@ -0,0 +1 @@ +export { default } from './MetricListItem'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx new file mode 100644 index 000000000..9d895de13 --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsList/MetricsList.tsx @@ -0,0 +1,58 @@ +import { useObserver } from 'mobx-react-lite'; +import React, { useEffect } from 'react'; +import { NoContent, Pagination } from 'UI'; +import { useStore } from 'App/mstore'; +import { getRE } from 'App/utils'; +import MetricListItem from '../MetricListItem'; +import { sliceListPerPage } from 'App/utils'; + +interface Props { } +function MetricsList(props: Props) { + const { metricStore } = useStore(); + const metrics = useObserver(() => metricStore.metrics); + const metricsSearch = useObserver(() => metricStore.metricsSearch); + const filterList = (list) => { + const filterRE = getRE(metricsSearch, 'i'); + let _list = list.filter(w => { + const dashboardNames = w.dashboards.map(d => d.name).join(' '); + return filterRE.test(w.name) || filterRE.test(w.metricType) || filterRE.test(w.owner) || filterRE.test(dashboardNames); + }); + return _list + } + const list: any = metricsSearch !== '' ? filterList(metrics) : metrics; + + useEffect(() => { + metricStore.updateKey('sessionsPage', 1); + }, []) + + return useObserver(() => ( + +
+
+
Title
+
Type
+
Dashboards
+
Owner
+
Visibility
+
Last Modified
+
+ + {sliceListPerPage(list, metricStore.page - 1, metricStore.pageSize).map((metric: any) => ( + + ))} +
+ +
+ metricStore.updateKey('page', page)} + limit={metricStore.pageSize} + debounceRequest={100} + /> +
+
+ )); +} + +export default MetricsList; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsList/index.ts b/frontend/app/components/Dashboard/components/MetricsList/index.ts new file mode 100644 index 000000000..ad693888c --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsList/index.ts @@ -0,0 +1 @@ +export { default } from './MetricsList'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsSearch/MetricsSearch.tsx b/frontend/app/components/Dashboard/components/MetricsSearch/MetricsSearch.tsx new file mode 100644 index 000000000..066598c8e --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsSearch/MetricsSearch.tsx @@ -0,0 +1,34 @@ +import { useObserver } from 'mobx-react-lite'; +import React, { useEffect, useState } from 'react'; +import { useStore } from 'App/mstore'; +import { Icon } from 'UI'; +import { debounce } from 'App/utils'; + +let debounceUpdate: any = () => {} +function MetricsSearch(props) { + const { metricStore } = useStore(); + const [query, setQuery] = useState(metricStore.metricsSearch); + useEffect(() => { + debounceUpdate = debounce((key, value) => metricStore.updateKey(key, value), 500); + }, []) + + const write = ({ target: { name, value } }) => { + setQuery(value); + debounceUpdate('metricsSearch', value); + } + + return useObserver(() => ( +
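Note on `filterList` in MetricsList above: the query matches name, type, owner, and the joined dashboard names through `getRE` from App/utils, which is not in this diff. It presumably builds a safely escaped RegExp:

    // Hypothetical sketch of getRE (the 'i' flag above gives case-insensitive matching):
    function getRE(query: string, flags: string): RegExp {
      const escaped = query.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
      return new RegExp(escaped, flags);
    }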
+ + +
+ )); +} + +export default MetricsSearch; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsSearch/index.ts b/frontend/app/components/Dashboard/components/MetricsSearch/index.ts new file mode 100644 index 000000000..cf23f645d --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsSearch/index.ts @@ -0,0 +1 @@ +export { default } from './MetricsSearch'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx new file mode 100644 index 000000000..9d82d6288 --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx @@ -0,0 +1,37 @@ +import React from 'react'; +import { Button, PageTitle, Icon, Link } from 'UI'; +import withPageTitle from 'HOCs/withPageTitle'; +import MetricsList from '../MetricsList'; +import MetricsSearch from '../MetricsSearch'; +import { useStore } from 'App/mstore'; +import { useObserver } from 'mobx-react-lite'; + +interface Props{ + siteId: number; +} +function MetricsView(props: Props) { + const { siteId } = props; + const { metricStore } = useStore(); + const metricsCount = useObserver(() => metricStore.metrics.length); + + React.useEffect(() => { + metricStore.fetchList(); + }, []); + return useObserver(() => ( +
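Note on MetricsSearch above: keystrokes update local state immediately while the store write is debounced by 500 ms, so the list re-filters only after typing pauses. `debounce` comes from App/utils; an assumed minimal signature:

    // Hypothetical sketch (App/utils is not in this diff):
    function debounce<T extends (...args: any[]) => void>(fn: T, wait: number) {
      let timer: ReturnType<typeof setTimeout>;
      return (...args: Parameters<T>) => {
        clearTimeout(timer);
        timer = setTimeout(() => fn(...args), wait);
      };
    }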
+
+
+ + {metricsCount} +
+ +
+ +
+
+ +
+ )); +} + +export default withPageTitle('Metrics - OpenReplay')(MetricsView); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/MetricsView/index.ts b/frontend/app/components/Dashboard/components/MetricsView/index.ts new file mode 100644 index 000000000..bfebac6b9 --- /dev/null +++ b/frontend/app/components/Dashboard/components/MetricsView/index.ts @@ -0,0 +1 @@ +export { default } from './MetricsView'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx b/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx new file mode 100644 index 000000000..eb63d8f08 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetChart/WidgetChart.tsx @@ -0,0 +1,140 @@ +import React, { useState, useRef, useEffect } from 'react'; +import CustomMetriLineChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetriLineChart'; +import CustomMetricPercentage from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPercentage'; +import CustomMetricTable from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricTable'; +import CustomMetricPieChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricPieChart'; +import { Styles } from 'App/components/Dashboard/Widgets/common'; +import { observer, useObserver } from 'mobx-react-lite'; +import { Loader } from 'UI'; +import { useStore } from 'App/mstore'; +import WidgetPredefinedChart from '../WidgetPredefinedChart'; +import CustomMetricOverviewChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart'; +import { getStartAndEndTimestampsByDensity } from 'Types/dashboard/helper'; +import { debounce } from 'App/utils'; +interface Props { + metric: any; + isWidget?: boolean +} +function WidgetChart(props: Props) { + const { isWidget = false, metric } = props; + const { dashboardStore, metricStore } = useStore(); + const _metric: any = useObserver(() => metricStore.instance); + const period = useObserver(() => dashboardStore.period); + const drillDownFilter = useObserver(() => dashboardStore.drillDownFilter); + const colors = Styles.customMetricColors; + const [loading, setLoading] = useState(true) + const isOverviewWidget = metric.metricType === 'predefined' && metric.viewType === 'overview'; + const params = { density: isOverviewWidget ? 
7 : 70 } + const metricParams = { ...params } + const prevMetricRef = useRef(); + const [data, setData] = useState(metric.data); + + + const isTableWidget = metric.metricType === 'table' && metric.viewType === 'table'; + const isPieChart = metric.metricType === 'table' && metric.viewType === 'pieChart'; + + const onChartClick = (event: any) => { + if (event) { + if (isTableWidget || isPieChart) { + const periodTimestamps = period.toTimestamps() + drillDownFilter.merge({ + filters: event, + startTimestamp: periodTimestamps.startTimestamp, + endTimestamp: periodTimestamps.endTimestamp, + }); + } else { + const payload = event.activePayload[0].payload; + const timestamp = payload.timestamp; + const periodTimestamps = getStartAndEndTimestampsByDensity(timestamp, period.start, period.end, params.density); + + drillDownFilter.merge({ + startTimestamp: periodTimestamps.startTimestamp, + endTimestamp: periodTimestamps.endTimestamp, + }); + } + } + } + + const depsString = JSON.stringify(_metric.series); + + + const fetchMetricChartData = (metric, payload, isWidget) => { + setLoading(true) + dashboardStore.fetchMetricChartData(metric, payload, isWidget).then((res: any) => { + setData(res); + }).finally(() => { + setLoading(false); + }); + } + + const debounceRequest: any = React.useCallback(debounce(fetchMetricChartData, 500), []); + useEffect(() => { + if (prevMetricRef.current && prevMetricRef.current.name !== metric.name) { + prevMetricRef.current = metric; + return + }; + prevMetricRef.current = metric; + const payload = isWidget ? { ...params } : { ...metricParams, ...metric.toJson() }; + debounceRequest(metric, payload, isWidget); + }, [period, depsString]); + + const renderChart = () => { + const { metricType, viewType } = metric; + + if (metricType === 'predefined') { + if (isOverviewWidget) { + return + } + return + } + + if (metricType === 'timeseries') { + if (viewType === 'lineChart') { + return ( + + ) + } else if (viewType === 'progress') { + return ( + + ) + } + } + + if (metricType === 'table') { + if (viewType === 'table') { + return ; + } else if (viewType === 'pieChart') { + return ( + + ) + } + } + + return
Unknown
; + } + return useObserver(() => ( + + {renderChart()} + + )); +} + +export default observer(WidgetChart); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetChart/index.ts b/frontend/app/components/Dashboard/components/WidgetChart/index.ts new file mode 100644 index 000000000..0ea9108ea --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetChart/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetChart' diff --git a/frontend/app/components/Dashboard/components/WidgetForm/WidgetForm.tsx b/frontend/app/components/Dashboard/components/WidgetForm/WidgetForm.tsx new file mode 100644 index 000000000..79b44bc5a --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetForm/WidgetForm.tsx @@ -0,0 +1,225 @@ +import React, { useState } from 'react'; +import DropdownPlain from 'Shared/DropdownPlain'; +import { metricTypes, metricOf, issueOptions } from 'App/constants/filterOptions'; +import { FilterKey } from 'Types/filter/filterType'; +import { useStore } from 'App/mstore'; +import { useObserver } from 'mobx-react-lite'; +import { Button, Icon } from 'UI' +import FilterSeries from '../FilterSeries'; +import { confirm } from 'UI/Confirmation'; +import { withSiteId, dashboardMetricDetails, metricDetails } from 'App/routes' +import DashboardSelectionModal from '../DashboardSelectionModal/DashboardSelectionModal'; + +interface Props { + history: any; + match: any; + onDelete: () => void; +} + +function WidgetForm(props: Props) { + const [showDashboardSelectionModal, setShowDashboardSelectionModal] = useState(false); + const { history, match: { params: { siteId, dashboardId, metricId } } } = props; + const { metricStore, dashboardStore } = useStore(); + const dashboards = dashboardStore.dashboards; + const isSaving = useObserver(() => metricStore.isSaving); + const metric: any = useObserver(() => metricStore.instance); + + const timeseriesOptions = metricOf.filter(i => i.type === 'timeseries'); + const tableOptions = metricOf.filter(i => i.type === 'table'); + const isTable = metric.metricType === 'table'; + const _issueOptions = [{ text: 'All', value: 'all' }].concat(issueOptions); + const canAddToDashboard = metric.exists() && dashboards.length > 0; + const canAddSeries = metric.series.length < 3; + + const write = ({ target: { value, name } }) => metricStore.merge({ [ name ]: value }); + const writeOption = (e, { value, name }) => { + const obj = { [ name ]: value }; + + if (name === 'metricValue') { + obj['metricValue'] = [value]; + } + + if (name === 'metricOf') { + if (value === FilterKey.ISSUE) { + obj['metricValue'] = ['all']; + } + } + + if (name === 'metricType') { + if (value === 'timeseries') { + obj['metricOf'] = timeseriesOptions[0].value; + obj['viewType'] = 'lineChart'; + } else if (value === 'table') { + obj['metricOf'] = tableOptions[0].value; + obj['viewType'] = 'table'; + } + } + + metricStore.merge(obj); + }; + + const onSave = () => { + const wasCreating = !metric.exists() + metricStore.save(metric, dashboardId).then((metric) => { + if (wasCreating) { + if (parseInt(dashboardId) > 0) { + history.push(withSiteId(dashboardMetricDetails(parseInt(dashboardId), metric.metricId), siteId)); + } else { + history.push(withSiteId(metricDetails(metric.metricId), siteId)); + } + + } + }); + } + + const onDelete = async () => { + if (await confirm({ + header: 'Confirm', + confirmButton: 'Yes, delete', + confirmation: `Are you sure you want to permanently delete this metric?` + })) { + 
metricStore.delete(metric).then(props.onDelete); + } + } + + const onObserveChanges = () => { + // metricStore.fetchMetricChartData(metric); + } + + return useObserver(() => ( +
+
+ +
+ + + {metric.metricType === 'timeseries' && ( + <> + of + + + )} + + {metric.metricType === 'table' && ( + <> + of + + + )} + + {metric.metricOf === FilterKey.ISSUE && ( + <> + issue type + + + )} + + {metric.metricType === 'table' && ( + <> + showing + + + )} +
+
+ +
+
+ {`${isTable ? 'Filter by' : 'Chart Series'}`} + {!isTable && ( + + )} +
+ + {metric.series.length > 0 && metric.series.slice(0, isTable ? 1 : metric.series.length).map((series: any, index: number) => ( +
+ removeSeries(index)} + onRemoveSeries={() => metric.removeSeries(index)} + canDelete={metric.series.length > 1} + emptyMessage={isTable ? + 'Filter data using any event or attribute. Use Add Step button below to do so.' : + 'Add user event or filter to define the series by clicking Add Step.' + } + // observeChanges={onObserveChanges} + /> +
+ ))} +
+ +
+ +
+ {metric.exists() && ( + <> + + + + )} +
+
+ { canAddToDashboard && ( + setShowDashboardSelectionModal(false)} + /> + )} +
+ )); +} + +export default WidgetForm; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetForm/index.ts b/frontend/app/components/Dashboard/components/WidgetForm/index.ts new file mode 100644 index 000000000..283f9ec23 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetForm/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetForm'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx new file mode 100644 index 000000000..09e3b66b3 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx @@ -0,0 +1,58 @@ +import React, { useState, useRef, useEffect } from 'react'; +import { Icon } from 'UI'; + +interface Props { + name: string; + onUpdate: (name) => void; + seriesIndex?: number; + canEdit?: boolean +} +function WidgetName(props: Props) { + const { canEdit = true } = props; + const [editing, setEditing] = useState(false) + const [name, setName] = useState(props.name) + const ref = useRef(null) + + const write = ({ target: { value, name } }) => { + setName(value) + } + + const onBlur = () => { + setEditing(false) + props.onUpdate(name.trim() === '' ? 'New Widget' : name) + } + + useEffect(() => { + if (editing) { + ref.current.focus() + } + }, [editing]) + + useEffect(() => { + setName(props.name) + }, [props.name]) + + // const { name } = props; + return ( +
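Note on `writeOption` in WidgetForm above: switching `metricType` also resets the dependent fields. Restated as data for clarity (illustrative only; `timeseriesOptions` and `tableOptions` are the component's locals):

    // Defaults applied when metricType changes:
    const metricTypeDefaults = {
      timeseries: { metricOf: timeseriesOptions[0].value, viewType: 'lineChart' },
      table: { metricOf: tableOptions[0].value, viewType: 'table' },
    } as const;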
+ { editing ? ( + setEditing(true)} + /> + ) : ( +
{ name }
+ )} + + { canEdit &&
setEditing(true)}>
} +
+ ); +} + +export default WidgetName; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetName/index.ts b/frontend/app/components/Dashboard/components/WidgetName/index.ts new file mode 100644 index 000000000..322cc4441 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetName/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetName'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetPredefinedChart/WidgetPredefinedChart.tsx b/frontend/app/components/Dashboard/components/WidgetPredefinedChart/WidgetPredefinedChart.tsx new file mode 100644 index 000000000..c4aa215d7 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetPredefinedChart/WidgetPredefinedChart.tsx @@ -0,0 +1,109 @@ +import React from 'react'; +import { Styles } from 'App/components/Dashboard/Widgets/common'; +import CustomMetricOverviewChart from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricOverviewChart'; +import ErrorsByType from 'App/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByType'; +import ErrorsByOrigin from 'App/components/Dashboard/Widgets/PredefinedWidgets/ErrorsByOrigin'; +import ErrorsPerDomain from 'App/components/Dashboard/Widgets/PredefinedWidgets/ErrorsPerDomain'; +import { useObserver } from 'mobx-react-lite'; +import SessionsAffectedByJSErrors from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsAffectedByJSErrors'; +import CallsErrors4xx from 'App/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors4xx'; +import CallsErrors5xx from 'App/components/Dashboard/Widgets/PredefinedWidgets/CallsErrors5xx'; +import CPULoad from 'App/components/Dashboard/Widgets/PredefinedWidgets/CPULoad'; +import Crashes from 'App/components/Dashboard/Widgets/PredefinedWidgets/Crashes'; +import DomBuildingTime from 'App/components/Dashboard/Widgets/PredefinedWidgets/DomBuildingTime'; +import FPS from 'App/components/Dashboard/Widgets/PredefinedWidgets/FPS'; +import MemoryConsumption from 'App/components/Dashboard/Widgets/PredefinedWidgets/MemoryConsumption'; +import ResponseTime from 'App/components/Dashboard/Widgets/PredefinedWidgets/ResponseTime'; +import TimeToRender from 'App/components/Dashboard/Widgets/PredefinedWidgets/TimeToRender'; +import SlowestDomains from 'App/components/Dashboard/Widgets/PredefinedWidgets/SlowestDomains'; +import ResourceLoadedVsVisuallyComplete from 'App/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsVisuallyComplete'; +import SessionsImpactedBySlowRequests from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsImpactedBySlowRequests'; +import ResourceLoadingTime from 'App/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadingTime'; +import BreakdownOfLoadedResources from 'App/components/Dashboard/Widgets/PredefinedWidgets/BreakdownOfLoadedResources'; +import MissingResources from 'App/components/Dashboard/Widgets/PredefinedWidgets/MissingResources'; +import ResourceLoadedVsResponseEnd from 'App/components/Dashboard/Widgets/PredefinedWidgets/ResourceLoadedVsResponseEnd'; +import SessionsPerBrowser from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser'; +import CallWithErrors from '../../Widgets/PredefinedWidgets/CallWithErrors'; +import SpeedIndexByLocation from '../../Widgets/PredefinedWidgets/SpeedIndexByLocation'; +import SlowestResources from '../../Widgets/PredefinedWidgets/SlowestResources'; +import ResponseTimeDistribution from 
'../../Widgets/PredefinedWidgets/ResponseTimeDistribution'; + +interface Props { + data: any; + predefinedKey: string + metric?: any; +} +function WidgetPredefinedChart(props: Props) { + const { data, predefinedKey, metric } = props; + + const renderWidget = () => { + switch (predefinedKey) { + // ERRORS + case 'errors_per_type': + return + case 'errors_per_domains': + return + case 'resources_by_party': + return + case 'impacted_sessions_by_js_errors': + return + case 'domains_errors_4xx': + return + case 'domains_errors_5xx': + return + case 'calls_errors': + return + + // PERFORMANCE + case 'impacted_sessions_by_slow_pages': + return + case 'pages_response_time_distribution': + return + case 'speed_location': + return + case 'cpu': + return + case 'crashes': + return + case 'pages_dom_buildtime': + return + case 'fps': + return + case 'memory_consumption': + return + case 'pages_response_time': + return + case 'resources_vs_visually_complete': + return + case 'sessions_per_browser': + return + case 'slowest_domains': + return + case 'time_to_render': + return + + // Resources + case 'resources_count_by_type': + return + case 'missing_resources': + return + case 'resource_type_vs_response_end': + return + case 'resources_loading_time': + return + case 'slowest_resources': + return + + default: + return
Widget not supported
+ } + } + + return useObserver(() => ( + <> + {renderWidget()} + + )); +} + +export default WidgetPredefinedChart; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetPredefinedChart/index.ts b/frontend/app/components/Dashboard/components/WidgetPredefinedChart/index.ts new file mode 100644 index 000000000..e54ae37cd --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetPredefinedChart/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetPredefinedChart' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetPreview/WidgetPreview.tsx b/frontend/app/components/Dashboard/components/WidgetPreview/WidgetPreview.tsx new file mode 100644 index 000000000..24bfdcbd8 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetPreview/WidgetPreview.tsx @@ -0,0 +1,83 @@ +import React from 'react'; +import cn from 'classnames'; +import WidgetWrapper from '../WidgetWrapper'; +import { useStore } from 'App/mstore'; +import { Loader, NoContent, SegmentSelection, Icon } from 'UI'; +import DateRange from 'Shared/DateRange'; +import { useObserver } from 'mobx-react-lite'; + +interface Props { + className?: string; +} +function WidgetPreview(props: Props) { + const { className = '' } = props; + const { metricStore, dashboardStore } = useStore(); + const period = useObserver(() => dashboardStore.period); + const metric: any = useObserver(() => metricStore.instance); + const isTimeSeries = metric.metricType === 'timeseries'; + const isTable = metric.metricType === 'table'; + + const changeViewType = (e, { name, value }) => { + metric.update({ [ name ]: value }); + } + + return useObserver(() => ( +
+
+

Trend

+
+ {isTimeSeries && ( + <> + Visualization + + + )} + + {isTable && ( + <> + Visualization + + + )} +
+ Time Range + dashboardStore.setPeriod(period)} + customRangeRight + direction="left" + /> +
+
+
+ +
+
+ )); +} + +export default WidgetPreview; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetPreview/index.ts b/frontend/app/components/Dashboard/components/WidgetPreview/index.ts new file mode 100644 index 000000000..9d28f8146 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetPreview/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetPreview'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx new file mode 100644 index 000000000..9f2f1cb3d --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -0,0 +1,129 @@ +import React, { useEffect, useState } from 'react'; +import { NoContent, Dropdown, Icon, Loader, Pagination } from 'UI'; +import cn from 'classnames'; +import { useStore } from 'App/mstore'; +import SessionItem from 'Shared/SessionItem'; +import { observer, useObserver } from 'mobx-react-lite'; +import { DateTime } from 'luxon'; +import { debounce } from 'App/utils'; +interface Props { + className?: string; +} +function WidgetSessions(props: Props) { + const { className = '' } = props; + const [data, setData] = useState([]); + + const [loading, setLoading] = useState(false); + const [seriesOptions, setSeriesOptions] = useState([ + { text: 'All', value: 'all' }, + ]); + + const [activeSeries, setActiveSeries] = useState('all'); + + const writeOption = (e, { name, value }) => setActiveSeries(value); + useEffect(() => { + if (!data) return; + const seriesOptions = data.map(item => ({ + text: item.seriesName, + value: item.seriesId, + })); + setSeriesOptions([ + { text: 'All', value: 'all' }, + ...seriesOptions, + ]); + }, [data]); + + const fetchSessions = (metricId, filter) => { + setLoading(true) + widget.fetchSessions(metricId, filter).then(res => { + setData(res) + }).finally(() => { + setLoading(false) + }); + } + + const filteredSessions = getListSessionsBySeries(data, activeSeries); + const { dashboardStore, metricStore } = useStore(); + const filter = useObserver(() => dashboardStore.drillDownFilter); + const widget: any = useObserver(() => metricStore.instance); + const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat('LLL dd, yyyy HH:mm a'); + const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat('LLL dd, yyyy HH:mm a'); + const debounceRequest: any = React.useCallback(debounce(fetchSessions, 1000), []); + + const depsString = JSON.stringify(widget.series); + useEffect(() => { + debounceRequest(widget.metricId, { ...filter, series: widget.toJsonDrilldown(), page: metricStore.sessionsPage, limit: metricStore.sessionsPageSize }); + }, [filter.startTimestamp, filter.endTimestamp, filter.filters, depsString, metricStore.sessionsPage]); + + return useObserver(() => ( +
+
+
+

Sessions

+
between {startTime} and {endTime}
+
+ + { widget.metricType !== 'table' && ( +
+ Series + } + /> +
+ )} +
+ +
+ + + {filteredSessions.sessions.map((session: any) => ( + + ))} + +
+ metricStore.updateKey('sessionsPage', page)} + limit={metricStore.sessionsPageSize} + debounceRequest={500} + /> +
+
+
+
+
+ )); +} + +const getListSessionsBySeries = (data, seriesId) => { + const arr: any = { sessions: [], total: 0 }; + data.forEach(element => { + if (seriesId === 'all') { + const sessionIds = arr.sessions.map(i => i.sessionId); + arr.sessions.push(...element.sessions.filter(i => !sessionIds.includes(i.sessionId))); + arr.total = element.total + } else { + if (element.seriesId === seriesId) { + arr.sessions.push(...element.sessions) + arr.total = element.total + } + } + }); + return arr; +} + +export default observer(WidgetSessions); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/index.ts b/frontend/app/components/Dashboard/components/WidgetSessions/index.ts new file mode 100644 index 000000000..8f309114a --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetSessions/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetSessions'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx b/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx new file mode 100644 index 000000000..1993cd2f6 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx @@ -0,0 +1,72 @@ +import React, { useState } from 'react'; +import { useStore } from 'App/mstore'; +import WidgetForm from '../WidgetForm'; +import WidgetPreview from '../WidgetPreview'; +import WidgetSessions from '../WidgetSessions'; +import { Icon, BackLink, Loader } from 'UI'; +import { useObserver } from 'mobx-react-lite'; +import { withSiteId } from 'App/routes'; +import WidgetName from '../WidgetName'; +interface Props { + history: any; + match: any + siteId: any +} +function WidgetView(props: Props) { + const { match: { params: { siteId, dashboardId, metricId } } } = props; + const { metricStore } = useStore(); + const widget = useObserver(() => metricStore.instance); + const loading = useObserver(() => metricStore.isLoading); + const [expanded, setExpanded] = useState(!metricId || metricId === 'create'); + + React.useEffect(() => { + if (metricId && metricId !== 'create') { + metricStore.fetch(metricId); + } else { + metricStore.init(); + } + }, []) + + const onBackHandler = () => { + if (dashboardId) { + props.history.push(withSiteId(`/dashboard/${dashboardId}`, siteId)); + } else { + props.history.push(withSiteId(`/metrics`, siteId)); + } + } + + return useObserver(() => ( + +
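Note on `getListSessionsBySeries` at the end of WidgetSessions above: for the 'all' series it de-duplicates sessions by `sessionId` across series, but `total` is simply overwritten on each pass, so the last series' total wins. A worked example against the visible implementation:

    const merged = getListSessionsBySeries([
      { seriesId: 1, total: 2, sessions: [{ sessionId: 's1' }, { sessionId: 's2' }] },
      { seriesId: 2, total: 1, sessions: [{ sessionId: 's1' }] },
    ], 'all');
    // merged.sessions -> s1, s2 (the duplicate s1 is dropped)
    // merged.total    -> 1 (overwritten by the last element)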
+ +
+
+

+ metricStore.merge({ name })} + canEdit={expanded} + /> +

+
+
setExpanded(!expanded)} + className="flex items-center cursor-pointer select-none" + > + {expanded ? 'Close' : 'Edit'} + +
+
+
+ + { expanded && } +
+ + + +
+
+ )); +} + +export default WidgetView; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetView/index.ts b/frontend/app/components/Dashboard/components/WidgetView/index.ts new file mode 100644 index 000000000..7bafa7a72 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetView/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetView' \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/AlertButton.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/AlertButton.tsx new file mode 100644 index 000000000..78d858b3e --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/AlertButton.tsx @@ -0,0 +1,28 @@ +import React from 'react'; +import { connect } from 'react-redux'; +import WidgetIcon from './WidgetIcon'; +import { init as initAlert } from 'Duck/alerts'; +import { useStore } from 'App/mstore'; + +interface Props { + seriesId: string; + initAlert: Function; +} +function AlertButton(props: Props) { + const { seriesId, initAlert } = props; + const { dashboardStore } = useStore(); + const onClick = () => { + initAlert({ query: { left: seriesId }}) + dashboardStore.updateKey('showAlertModal', true); + } + return ( + + ); +} + +export default connect(null, { initAlert })(AlertButton); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/TemplateOverlay.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/TemplateOverlay.tsx new file mode 100644 index 000000000..d95e9d894 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/TemplateOverlay.tsx @@ -0,0 +1,20 @@ +//@ts-nocheck +import React from 'react'; +import { Tooltip } from 'react-tippy'; + +function TemplateOverlay() { + return ( +
+ +
+ +
+ ); +} + +export default TemplateOverlay; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetIcon.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetIcon.tsx new file mode 100644 index 000000000..92d55ae47 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetIcon.tsx @@ -0,0 +1,28 @@ +//@ts-nocheck +import React from 'react'; +import { Icon } from 'UI'; +import { Tooltip } from 'react-tippy'; + +interface Props { + className: string + onClick: () => void + icon: string + tooltip: string +} +function WidgetIcon(props: Props) { + const { className, onClick, icon, tooltip } = props; + return ( + +
+ +
+
+ ); +} + +export default WidgetIcon; diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx new file mode 100644 index 000000000..b29ab800c --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/WidgetWrapper.tsx @@ -0,0 +1,130 @@ +import React, { useEffect, useRef } from 'react'; +import cn from 'classnames'; +import { ItemMenu } from 'UI'; +import { useDrag, useDrop } from 'react-dnd'; +import WidgetChart from '../WidgetChart'; +import { useObserver } from 'mobx-react-lite'; +// import { confirm } from 'UI/Confirmation'; +import { useStore } from 'App/mstore'; +import LazyLoad from 'react-lazyload'; +import { withRouter } from 'react-router-dom'; +import { withSiteId, dashboardMetricDetails } from 'App/routes'; +import TemplateOverlay from './TemplateOverlay'; +import WidgetIcon from './WidgetIcon'; +import AlertButton from './AlertButton'; + +interface Props { + className?: string; + widget?: any; + index?: number; + moveListItem?: any; + isPreview?: boolean; + isTemplate?: boolean + dashboardId?: string; + siteId?: string, + active?: boolean; + history?: any + onClick?: () => void; + isWidget?: boolean; +} +function WidgetWrapper(props: Props) { + const { dashboardStore } = useStore(); + const { isWidget = false, active = false, index = 0, moveListItem = null, isPreview = false, isTemplate = false, dashboardId, siteId } = props; + const widget: any = useObserver(() => props.widget); + const isPredefined = widget.metricType === 'predefined'; + const dashboard = useObserver(() => dashboardStore.selectedDashboard); + const isOverviewWidget = widget.widgetType === 'predefined' && widget.viewType === 'overview'; + + const [{ opacity, isDragging }, dragRef] = useDrag({ + type: 'item', + item: { index }, + collect: (monitor) => ({ + isDragging: monitor.isDragging(), + opacity: monitor.isDragging() ? 0.5 : 1, + }), + }); + + const [{ isOver, canDrop }, dropRef] = useDrop({ + accept: 'item', + drop: (item: any) => { + if (item.index === index) return; + moveListItem(item.index, index); + }, + collect: (monitor: any) => ({ + isOver: monitor.isOver(), + canDrop: monitor.canDrop(), + }), + }) + + const onDelete = async () => { + dashboardStore.deleteDashboardWidget(dashboard?.dashboardId, widget.widgetId); + // if (await confirm({ + // header: 'Confirm', + // confirmButton: 'Yes, delete', + // confirmation: `Are you sure you want to permanently delete the widget from this dashboard?` + // })) { + // dashboardStore.deleteDashboardWidget(dashboardId!, widget.widgetId); + // } + } + + const onChartClick = () => { + if (!isWidget || isPredefined) return; + + props.history.push(withSiteId(dashboardMetricDetails(dashboard?.dashboardId, widget.metricId),siteId)); + } + + const ref: any = useRef(null) + const dragDropRef: any = dragRef(dropRef(ref)) + + return useObserver(() => ( +
{}} + > + {isTemplate && } +
+

{widget.name}

+ {isWidget && ( +
+ {!isPredefined && ( + <> + +
+ + )} + + +
+ )} +
+ + +
+ +
+
+
+ )); +} + +export default withRouter(WidgetWrapper); \ No newline at end of file diff --git a/frontend/app/components/Dashboard/components/WidgetWrapper/index.ts b/frontend/app/components/Dashboard/components/WidgetWrapper/index.ts new file mode 100644 index 000000000..83890df93 --- /dev/null +++ b/frontend/app/components/Dashboard/components/WidgetWrapper/index.ts @@ -0,0 +1 @@ +export { default } from './WidgetWrapper'; \ No newline at end of file diff --git a/frontend/app/components/Dashboard/store/store.tsx b/frontend/app/components/Dashboard/store/store.tsx new file mode 100644 index 000000000..64e1944b9 --- /dev/null +++ b/frontend/app/components/Dashboard/store/store.tsx @@ -0,0 +1,15 @@ +import React from 'react' + +const StoreContext = React.createContext(null) + +export const DashboardStoreProvider = ({ children, store }) => { + return ( + {children} + ); +}; + +export const useDashboardStore = () => React.useContext(StoreContext); + +export const withDashboardStore = (Component) => (props) => { + return ; +}; \ No newline at end of file diff --git a/frontend/app/components/Errors/Error/ErrorInfo.js b/frontend/app/components/Errors/Error/ErrorInfo.js index 4726bc613..6754c2407 100644 --- a/frontend/app/components/Errors/Error/ErrorInfo.js +++ b/frontend/app/components/Errors/Error/ErrorInfo.js @@ -67,7 +67,7 @@ export default class ErrorInfo extends React.PureComponent {
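Editor's note on the WidgetWrapper diff above: it wires `useDrag`/`useDrop` so dashboard widgets can be reordered, and on drop it calls a `moveListItem(fromIndex, toIndex)` prop supplied by the parent grid, which this PR does not include. A hedged sketch of the reorder logic such a callback usually wraps (pure helper, names illustrative, not code from this diff):

    // Hypothetical reorder helper behind moveListItem(from, to):
    // remove the dragged widget and re-insert it at the drop index.
    function moveListItem<T>(list: T[], from: number, to: number): T[] {
      const next = list.slice();              // copy so the observable store sees a new array
      const [dragged] = next.splice(from, 1); // take the dragged item out
      next.splice(to, 0, dragged);            // insert it at the target position
      return next;
    }

Returning a fresh array keeps the store update immutable, which plays well with the mobx observers used throughout these components.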
diff --git a/frontend/app/components/Errors/Error/Trend.js b/frontend/app/components/Errors/Error/Trend.js index e6dcc1445..03b01909f 100644 --- a/frontend/app/components/Errors/Error/Trend.js +++ b/frontend/app/components/Errors/Error/Trend.js @@ -20,7 +20,6 @@ function Trend({ title = '', chart, onDateChange, timeFormat = 'hh:mm a' }) { if (!Array.isArray(chart)) return null const getDateFormat = val => { - console.log(val); const d = new Date(val); return (d.getMonth()+ 1) + '/' + d.getDate() } diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js index 2fa91c5e5..82ecce40c 100644 --- a/frontend/app/components/Errors/List/List.js +++ b/frontend/app/components/Errors/List/List.js @@ -219,7 +219,7 @@ export default class List extends React.PureComponent { diff --git a/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js b/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js index 48142ef13..17078976c 100644 --- a/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js +++ b/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js @@ -143,7 +143,7 @@ export default connect((state, props) => { funnelId: props.match.params.funnelId, activeStages: state.getIn(['funnels', 'activeStages']), funnelFilters: state.getIn(['funnels', 'funnelFilters']), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), liveFilters: state.getIn(['funnelFilters', 'appliedFilter']), } }, { diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js index 87f7983b7..a5b3bf445 100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelDropdown.js @@ -33,5 +33,5 @@ function FunnelDropdown(props) { export default connect((state, props) => ({ funnels: state.getIn(['funnels', 'list']), funnel: state.getIn(['funnels', 'instance']), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), }), { })(withRouter(FunnelDropdown)) diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js index 6130351e2..2297ca324 100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js @@ -1,7 +1,7 @@ import React, { useEffect, useState } from 'react'; import { Icon, BackLink, IconButton, Dropdown, Popup, TextEllipsis, Button } from 'UI'; import { remove as deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered } from 'Duck/funnels'; -import { editFilter, refresh, addFilter } from 'Duck/funnels'; +import { editFilter, editFunnelFilter, refresh, addFilter } from 'Duck/funnels'; import DateRange from 'Shared/DateRange'; import { connect } from 'react-redux'; import { confirm } from 'UI/Confirmation'; @@ -18,7 +18,7 @@ const Info = ({ label = '', value = '', className = 'mx-4' }) => { } const FunnelHeader = (props) => { - const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props; + const { funnel, insights, funnels, onBack, funnelId, showFilters = false, funnelFilters, renameHandler } = props; const [showSaveModal, setShowSaveModal] = useState(false) const writeOption = (e, { name, value }) => { @@ -40,7 +40,7 @@ const FunnelHeader = (props) => { } const onDateChange = (e) => { - props.editFilter(e, funnelId); + 
props.editFunnelFilter(e, funnelId); } const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS(); @@ -55,7 +55,7 @@ const FunnelHeader = (props) => { show={showSaveModal} closeHandler={() => setShowSaveModal(false)} /> -
+
{ />
@@ -109,5 +109,6 @@ const FunnelHeader = (props) => { } export default connect(state => ({ + funnelFilters: state.getIn([ 'funnels', 'funnelFilters' ]).toJS(), funnel: state.getIn([ 'funnels', 'instance' ]), -}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader) +}), { editFilter, editFunnelFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader) diff --git a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js index 6672bc580..77017e4ac 100644 --- a/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js +++ b/frontend/app/components/Funnels/FunnelIssueDetails/FunnelIssueDetails.js @@ -39,5 +39,5 @@ export default connect((state, props) => ({ issue: state.getIn(['funnels', 'issue']), issueId: props.match.params.issueId, funnelId: props.match.params.funnelId, - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), }), { fetchIssue, setNavRef, resetIssue })(withRouter(FunnelIssueDetails)) diff --git a/frontend/app/components/Funnels/FunnelIssues/FunnelIssues.js b/frontend/app/components/Funnels/FunnelIssues/FunnelIssues.js index 82d39037c..036565d60 100644 --- a/frontend/app/components/Funnels/FunnelIssues/FunnelIssues.js +++ b/frontend/app/components/Funnels/FunnelIssues/FunnelIssues.js @@ -44,7 +44,7 @@ function FunnelIssues(props) { { filteredList.take(displayedCount).map(issue => ( @@ -73,7 +73,7 @@ export default connect(state => ({ list: state.getIn(['funnels', 'issues']), criticalIssuesCount: state.getIn(['funnels', 'criticalIssuesCount']), loading: state.getIn(['funnels', 'fetchIssuesRequest', 'loading']), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), funnel: state.getIn(['funnels', 'instance']), activeStages: state.getIn(['funnels', 'activeStages']), funnelFilters: state.getIn(['funnels', 'funnelFilters']), diff --git a/frontend/app/components/Funnels/FunnelList/FunnelList.js b/frontend/app/components/Funnels/FunnelList/FunnelList.js index f7e3c70de..c96ef2e74 100644 --- a/frontend/app/components/Funnels/FunnelList/FunnelList.js +++ b/frontend/app/components/Funnels/FunnelList/FunnelList.js @@ -28,5 +28,5 @@ function FunnelList(props) { export default connect(state => ({ list: state.getIn(['funnels', 'list']), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), }))(withRouter(FunnelList)) diff --git a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js index 707049faa..f1b3ec67a 100644 --- a/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js +++ b/frontend/app/components/Funnels/FunnelSessionList/FunnelSessionList.js @@ -30,7 +30,7 @@ function FunnelSessionList(props) { { list.take(displayedCount).map(session => ( diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 6d541fca7..460f518d7 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -96,7 +96,6 @@ const Header = (props) => { className={ styles.nav } activeClassName={ styles.active } > - { 'Errors' } { className={ styles.nav } activeClassName={ styles.active } > - { 'Metrics' } + { 'Dashboards' }
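Editor's note: a change that recurs through these hunks is the relocation of `siteId` from the `user` reducer to the `site` reducer, so every `state.getIn([ 'user', 'siteId' ])` becomes `state.getIn([ 'site', 'siteId' ])`. A shared selector would avoid touching dozens of call sites on the next such move; a sketch (hypothetical helper, not present in this diff):

    // Encodes the new state path once; connect() mappers would then read
    // siteId: getSiteId(state) instead of repeating the path literal.
    const getSiteId = (state: any): string => state.getIn(['site', 'siteId']);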
@@ -151,7 +150,7 @@ export default withRouter(connect( state => ({ account: state.getIn([ 'user', 'account' ]), appearance: state.getIn([ 'user', 'account', 'appearance' ]), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), sites: state.getIn([ 'site', 'list' ]), showAlerts: state.getIn([ 'dashboard', 'showAlerts' ]), boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ]) diff --git a/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js b/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js index c6d7aa179..d143a6bbd 100644 --- a/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js +++ b/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js @@ -37,7 +37,7 @@ const styles = { }; @connect(state => ({ - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), boarding: state.getIn([ 'dashboard', 'boarding' ]), boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ]), }), { diff --git a/frontend/app/components/Header/SiteDropdown.js b/frontend/app/components/Header/SiteDropdown.js index 74b650055..38fe6ec57 100644 --- a/frontend/app/components/Header/SiteDropdown.js +++ b/frontend/app/components/Header/SiteDropdown.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { setSiteId } from 'Duck/user'; +import { setSiteId } from 'Duck/site'; import { withRouter } from 'react-router-dom'; import { hasSiteId, siteChangeAvaliable } from 'App/routes'; import { STATUS_COLOR_MAP, GREEN } from 'Types/site'; @@ -13,11 +13,13 @@ import { clearSearch } from 'Duck/search'; import { fetchList as fetchIntegrationVariables } from 'Duck/customField'; import { fetchList as fetchAlerts } from 'Duck/alerts'; import { fetchWatchdogStatus } from 'Duck/watchdogs'; +import { withStore } from 'App/mstore' +@withStore @withRouter @connect(state => ({ sites: state.getIn([ 'site', 'list' ]), - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), account: state.getIn([ 'user', 'account' ]), }), { setSiteId, @@ -45,11 +47,16 @@ export default class SiteDropdown extends React.PureComponent { } switchSite = (siteId) => { + const { mstore } = this.props + + this.props.setSiteId(siteId); this.props.clearSearch(); this.props.fetchIntegrationVariables(); this.props.fetchAlerts(); this.props.fetchWatchdogStatus(); + + mstore.initClient(); } render() { diff --git a/frontend/app/components/Header/header.css b/frontend/app/components/Header/header.css index 427f95e18..e817d80fb 100644 --- a/frontend/app/components/Header/header.css +++ b/frontend/app/components/Header/header.css @@ -25,9 +25,11 @@ $height: 50px; color: $gray-darkest; text-transform: uppercase; white-space: nowrap; + transition: all .2s ease-in-out; &:hover, &.active { color: $teal; border-bottom: 2px solid $teal; + transition: all .2s ease-in-out; } position: relative; } diff --git a/frontend/app/components/Modal/Modal.js b/frontend/app/components/Modal/Modal.js index cb4738cd0..baf226621 100644 --- a/frontend/app/components/Modal/Modal.js +++ b/frontend/app/components/Modal/Modal.js @@ -1,13 +1,15 @@ -export default class Modal extends React.PureComponent { - constructor(props) { - super(props); - this.el = document.createElement('div'); - } +import React, { useEffect } from 'react'; +import ReactDOM from 'react-dom'; +import { useModal } from '.'; +import ModalOverlay from './ModalOverlay'; - render() { - return ReactDOM.createPortal( - 
this.props.children, - this.el, - ); - } +export default function Modal({ children }){ + const { component } = useModal(); + + return component ? ReactDOM.createPortal( + + {component} + , + document.querySelector("#modal-root"), + ) : null; } \ No newline at end of file diff --git a/frontend/app/components/Modal/ModalContext.js b/frontend/app/components/Modal/ModalContext.js index 197358f97..43ab9f8b8 100644 --- a/frontend/app/components/Modal/ModalContext.js +++ b/frontend/app/components/Modal/ModalContext.js @@ -1,12 +1,13 @@ -const ModalContext = React.createContext({ +import React, { Component, createContext } from 'react'; + +const ModalContext = createContext({ component: null, props: {}, - content: null, showModal: () => {}, hideModal: () => {} }); -export class ModalProvider extends React.PureComponent { +export class ModalProvider extends Component { showModal = (component, props = {}) => { this.setState({ component, @@ -14,10 +15,11 @@ export class ModalProvider extends React.PureComponent { }); }; - hideModal = () => this.setState({ - component: null, - props: {}, - }); + hideModal = () => + this.setState({ + component: null, + props: {} + }); state = { component: null, diff --git a/frontend/app/components/Modal/ModalOverlay.css b/frontend/app/components/Modal/ModalOverlay.css new file mode 100644 index 000000000..e3e33562a --- /dev/null +++ b/frontend/app/components/Modal/ModalOverlay.css @@ -0,0 +1,32 @@ +.overlay { + /* absolute w-full h-screen cursor-pointer */ + position: absolute; + width: 100%; + height: 100vh; + cursor: pointer; + /* transition: all 0.3s ease-in-out; */ + animation: fade 1s forwards; +} +.slide { + position: absolute; + left: -100%; + -webkit-animation: slide 0.5s forwards; + animation: slide 0.5s forwards; +} + +@keyframes fade { + 0% { + opacity: 0; + } + 100% { + opacity: 1; + } +} + +@-webkit-keyframes slide { + 100% { left: 0; } +} + +@keyframes slide { + 100% { left: 0; } +} \ No newline at end of file diff --git a/frontend/app/components/Modal/ModalOverlay.tsx b/frontend/app/components/Modal/ModalOverlay.tsx new file mode 100644 index 000000000..4dad0ec61 --- /dev/null +++ b/frontend/app/components/Modal/ModalOverlay.tsx @@ -0,0 +1,20 @@ +import React from 'react'; +import { useModal } from 'App/components/Modal'; +import stl from './ModalOverlay.css' + +function ModalOverlay({ children }) { + let modal = useModal(); + + return ( +
+
modal.hideModal()} + className={stl.overlay} + style={{ background: "rgba(0,0,0,0.5)" }} + /> +
{children}
+
+ ); +} + +export default ModalOverlay; \ No newline at end of file diff --git a/frontend/app/components/Modal/ModalRoot.js b/frontend/app/components/Modal/ModalRoot.js index 226447af1..98152c59e 100644 --- a/frontend/app/components/Modal/ModalRoot.js +++ b/frontend/app/components/Modal/ModalRoot.js @@ -1,3 +1,6 @@ +import React from 'react'; +import { ModalConsumer } from './ModalContext'; + const ModalRoot = () => ( {({ component: Component, props, hideModal }) => @@ -6,4 +9,4 @@ const ModalRoot = () => ( ); -export default ModalRoot \ No newline at end of file +export default ModalRoot; diff --git a/frontend/app/components/Modal/index.tsx b/frontend/app/components/Modal/index.tsx new file mode 100644 index 000000000..a653ed24f --- /dev/null +++ b/frontend/app/components/Modal/index.tsx @@ -0,0 +1,55 @@ +//@ts-nocheck +import React, { Component, createContext } from 'react'; +import Modal from './Modal'; + +const ModalContext = createContext({ + component: null, + props: {}, + showModal: (component: any, props: any) => {}, + hideModal: () => {} +}); + +export class ModalProvider extends Component { + + handleKeyDown = (e: any) => { + if (e.keyCode === 27) { + this.hideModal(); + } + } + + showModal = (component, props = {}) => { + this.setState({ + component, + props + }); + document.addEventListener('keydown', this.handleKeyDown); + }; + + hideModal = () => { + this.setState({ + component: null, + props: {} + }); + document.removeEventListener('keydown', this.handleKeyDown); + } + + state = { + component: null, + props: {}, + showModal: this.showModal, + hideModal: this.hideModal + }; + + render() { + return ( + + + {this.props.children} + + ); + } +} + +export const ModalConsumer = ModalContext.Consumer; + +export const useModal = () => React.useContext(ModalContext); \ No newline at end of file diff --git a/frontend/app/components/Modal/useModal.ts b/frontend/app/components/Modal/useModal.ts new file mode 100644 index 000000000..edcf08e98 --- /dev/null +++ b/frontend/app/components/Modal/useModal.ts @@ -0,0 +1,15 @@ +import React from "react"; + +export default () => { + let [modal, setModal] = React.useState(false); + let [modalContent, setModalContent] = React.useState("I'm the Modal Content"); + + let handleModal = (content = false) => { + setModal(!modal); + if (content) { + setModalContent(content); + } + }; + + return { modal, handleModal, modalContent }; +}; diff --git a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js index 7002ed604..fd3f9a7e8 100644 --- a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js +++ b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js @@ -19,7 +19,8 @@ const inputModeOptionsMap = {} inputModeOptions.forEach((o, i) => inputModeOptionsMap[o.value] = i) const ProjectCodeSnippet = props => { - const { site, gdpr } = props; + const site = props.sites.find(s => s.id === props.siteId); + const { gdpr } = site; const [changed, setChanged] = useState(false) const [copied, setCopied] = useState(false) @@ -72,7 +73,7 @@ const ProjectCodeSnippet = props => { } const getOptionValues = () => { - const { gdpr } = props.site; + // const { gdpr } = site; return (!!gdpr.maskEmails)|(!!gdpr.maskNumbers << 1)|(['plain' , 'obscured', 'hidden'].indexOf(gdpr.defaultInputMode) << 5)|28 } @@ -164,7 +165,8 @@ const 
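Editor's note on the modal rewrite above: the class-based portal is replaced by a context. `ModalProvider` holds the active component in state, `Modal` portals it into `#modal-root` behind a fading `ModalOverlay`, and Escape is handled through a document-level keydown listener registered in `showModal`. A hedged usage sketch of the resulting API (the component and its content are illustrative, not from this PR):

    import React from 'react';
    import { useModal } from 'App/components/Modal';

    // Any component can open arbitrary content through the context;
    // clicking the overlay or pressing Escape closes it.
    function SessionDetailsButton() {
      const { showModal, hideModal } = useModal();
      const open = () => showModal(<button onClick={hideModal}>Close details</button>);
      return <button onClick={open}>Open details</button>;
    }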
ProjectCodeSnippet = props => { } export default connect(state => ({ - site: state.getIn([ 'site', 'instance' ]), - gdpr: state.getIn([ 'site', 'instance', 'gdpr' ]), + siteId: state.getIn([ 'site', 'siteId' ]), + sites: state.getIn([ 'site', 'list' ]), + // gdpr: state.getIn([ 'site', 'instance', 'gdpr' ]), saving: state.getIn([ 'site', 'saveGDPR', 'loading' ]) }), { editGDPR, saveGDPR })(ProjectCodeSnippet) diff --git a/frontend/app/components/Onboarding/components/ProjectFormButton/ProjectFormButton.js b/frontend/app/components/Onboarding/components/ProjectFormButton/ProjectFormButton.js index c7386fe7b..67dbd6541 100644 --- a/frontend/app/components/Onboarding/components/ProjectFormButton/ProjectFormButton.js +++ b/frontend/app/components/Onboarding/components/ProjectFormButton/ProjectFormButton.js @@ -3,8 +3,9 @@ import { connect } from 'react-redux' import { SlideModal } from 'UI' import NewSiteForm from '../../../Client/Sites/NewSiteForm' -const ProjectFormButton = ({ children, site }) => { +const ProjectFormButton = ({ children, sites, siteId }) => { const [showModal, setShowModal] = useState(false) + const site = sites.find(({ id }) => id === siteId) const closeModal = () => setShowModal(!showModal); @@ -27,5 +28,6 @@ const ProjectFormButton = ({ children, site }) => { } export default connect(state => ({ - site: state.getIn([ 'site', 'instance' ]), + siteId: state.getIn([ 'site', 'siteId' ]), + sites: state.getIn([ 'site', 'list' ]), }))(ProjectFormButton) \ No newline at end of file diff --git a/frontend/app/components/Session/Layout/PlayOverlay.js b/frontend/app/components/Session/Layout/PlayOverlay.js index 537460c26..3fb156172 100644 --- a/frontend/app/components/Session/Layout/PlayOverlay.js +++ b/frontend/app/components/Session/Layout/PlayOverlay.js @@ -1,8 +1,6 @@ import cn from 'classnames'; import { useCallback, useState } from 'react'; - import { Icon } from 'UI'; - import cls from './PlayOverlay.css'; export default function PlayOverlay({ player }) { @@ -11,20 +9,17 @@ export default function PlayOverlay({ player }) { const togglePlay = useCallback(() => { player.togglePlay(); setIconVisible(true); - setTimeout( - () => setIconVisible(false), - 800, - ); + setTimeout(() => setIconVisible(false), 800); }); return (
-
- -
-
+ className="absolute inset-0 flex items-center justify-center" + onClick={ togglePlay } + > +
+ +
+
); } diff --git a/frontend/app/components/Session/Layout/Player/Timeline.js b/frontend/app/components/Session/Layout/Player/Timeline.js index 5f05834ee..ca1383ffa 100644 --- a/frontend/app/components/Session/Layout/Player/Timeline.js +++ b/frontend/app/components/Session/Layout/Player/Timeline.js @@ -1,12 +1,9 @@ import { useCallback } from 'react'; import cn from 'classnames'; import { Popup } from 'UI'; - import { CRASHES, EVENTS } from 'Player/ios/state'; - import TimeTracker from './TimeTracker'; import PlayerTime from './PlayerTime'; - import cls from './timeline.css'; export default function Timeline({ player }) { @@ -19,7 +16,7 @@ export default function Timeline({ player }) { const time = Math.max(Math.round(p * player.state.endTime), 0); player.jump(time); }); - const scale = 100 / player.state.endTime; + const scale = 100 / player.state.endTime; return (
diff --git a/frontend/app/components/Session/Layout/ToolPanel/Performance.js b/frontend/app/components/Session/Layout/ToolPanel/Performance.js index 295085e46..f76c32f33 100644 --- a/frontend/app/components/Session/Layout/ToolPanel/Performance.js +++ b/frontend/app/components/Session/Layout/ToolPanel/Performance.js @@ -151,7 +151,6 @@ const NodesCountTooltip = ({ active, payload } ) => { const TICKS_COUNT = 10; function generateTicks(data: Array): Array { if (data.length === 0) return []; - console.log(data, data[0]) const minTime = data[0].time; const maxTime = data[data.length-1].time; diff --git a/frontend/app/components/Session/Layout/ToolPanel/StackEvents.js b/frontend/app/components/Session/Layout/ToolPanel/StackEvents.js index 0c10a38d1..6298a8a2e 100644 --- a/frontend/app/components/Session/Layout/ToolPanel/StackEvents.js +++ b/frontend/app/components/Session/Layout/ToolPanel/StackEvents.js @@ -66,7 +66,7 @@ function StackEvents({ export default connect(state => ({ hintIsHidden: state.getIn(['components', 'player', 'hiddenHints', 'stack']) || - !state.getIn([ 'user', 'client', 'sites' ]).some(s => s.stackIntegrations), + !state.getIn([ 'site', 'list' ]).some(s => s.stackIntegrations), }), { hideHint })(StackEvents); \ No newline at end of file diff --git a/frontend/app/components/Session/LiveSession.js b/frontend/app/components/Session/LiveSession.js index 46bd39cf5..0833112a2 100644 --- a/frontend/app/components/Session/LiveSession.js +++ b/frontend/app/components/Session/LiveSession.js @@ -27,7 +27,7 @@ function LiveSession({ useEffect(() => { if (sessionId != null) { - fetchSession(sessionId) + fetchSession(sessionId, true) } else { console.error("No sessionID in route.") } diff --git a/frontend/app/components/Session_/Fetch/FetchDetails.js b/frontend/app/components/Session_/Fetch/FetchDetails.js index b7a5386a1..6893c7194 100644 --- a/frontend/app/components/Session_/Fetch/FetchDetails.js +++ b/frontend/app/components/Session_/Fetch/FetchDetails.js @@ -44,7 +44,7 @@ export default class FetchDetails extends React.PureComponent { title="Body is Empty." size="small" show={ !payload } - icon="exclamation-circle" + animatedIcon="no-results" >
@@ -63,7 +63,7 @@ export default class FetchDetails extends React.PureComponent { title="Body is Empty." size="small" show={ !response } - icon="exclamation-circle" + animatedIcon="no-results" >
diff --git a/frontend/app/components/Session_/Fetch/components/Headers/Headers.tsx b/frontend/app/components/Session_/Fetch/components/Headers/Headers.tsx index fa941e36f..47ebff217 100644 --- a/frontend/app/components/Session_/Fetch/components/Headers/Headers.tsx +++ b/frontend/app/components/Session_/Fetch/components/Headers/Headers.tsx @@ -9,7 +9,7 @@ function Headers(props) { title="No data available." size="small" show={ !props.requestHeaders && !props.responseHeaders } - icon="exclamation-circle" + animatedIcon="no-results" > { props.requestHeaders && ( <> diff --git a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx index b83018c99..25170792d 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx @@ -21,7 +21,8 @@ interface Props { function PageInsightsPanel({ filters, fetchInsights, events = [], insights, urlOptions, host, loading = true }: Props) { - const [insightsFilters, setInsightsFilters] = useState(filters) + const [insightsFilters, setInsightsFilters] = useState(filters) + const defaultValue = (urlOptions && urlOptions[0]) ? urlOptions[0].value : '' const onDateChange = (e) => { const { startDate, endDate, rangeValue } = e; @@ -36,9 +37,11 @@ function PageInsightsPanel({ }, [insights]) useEffect(() => { - const url = insightsFilters.url ? insightsFilters.url : host + urlOptions[0].value; - Player.pause(); - fetchInsights({ ...insightsFilters, url }) + if (urlOptions && urlOptions[0]) { + const url = insightsFilters.url ? insightsFilters.url : host + urlOptions[0].value; + Player.pause(); + fetchInsights({ ...insightsFilters, url }) + } }, [insightsFilters]) const onPageSelect = (e, { name, value }) => { @@ -68,7 +71,7 @@ function PageInsightsPanel({ selection options={ urlOptions } name="url" - defaultValue={urlOptions[0].value} + defaultValue={defaultValue} onChange={ onPageSelect } id="change-dropdown" className="customDropdown" diff --git a/frontend/app/components/Session_/Performance/Performance.js b/frontend/app/components/Session_/Performance/Performance.js index 78aea13ce..3310970b3 100644 --- a/frontend/app/components/Session_/Performance/Performance.js +++ b/frontend/app/components/Session_/Performance/Performance.js @@ -129,7 +129,6 @@ const NodesCountTooltip = ({ active, payload} ) => { const TICKS_COUNT = 10; function generateTicks(data: Array): Array { if (data.length === 0) return []; - console.log(data, data[0]) const minTime = data[0].time; const maxTime = data[data.length-1].time; diff --git a/frontend/app/components/Session_/Player/Controls/Circle.tsx b/frontend/app/components/Session_/Player/Controls/Circle.tsx new file mode 100644 index 000000000..76740c73e --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/Circle.tsx @@ -0,0 +1,16 @@ +import React, { memo, FC } from 'react'; +import styles from './timeline.css'; + +interface Props { + preview?: boolean; +} +export const Circle: FC = memo(function Box({ preview }) { + return ( +
+ ) + }) + +export default Circle; \ No newline at end of file diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index 22fd3b0cf..ab1baba3e 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -118,6 +118,7 @@ export default class Controls extends React.Component { componentDidMount() { document.addEventListener('keydown', this.onKeyDown); } + componentWillUnmount() { document.removeEventListener('keydown', this.onKeyDown); //this.props.toggleInspectorMode(false); @@ -166,10 +167,10 @@ export default class Controls extends React.Component { return; } if (this.props.inspectorMode) return; - if (e.key === ' ') { - document.activeElement.blur(); - this.props.togglePlay(); - } + // if (e.key === ' ') { + // document.activeElement.blur(); + // this.props.togglePlay(); + // } if (e.key === 'Esc' || e.key === 'Escape') { this.props.fullscreenOff(); } @@ -262,7 +263,7 @@ export default class Controls extends React.Component { return (
- { !live && } + { !live && } { !fullscreen &&
diff --git a/frontend/app/components/Session_/Player/Controls/CustomDragLayer.tsx b/frontend/app/components/Session_/Player/Controls/CustomDragLayer.tsx new file mode 100644 index 000000000..c72f03ce2 --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/CustomDragLayer.tsx @@ -0,0 +1,98 @@ +import React, { memo } from 'react'; +import { useDragLayer } from "react-dnd"; +import Circle from './Circle' +import type { CSSProperties, FC } from 'react' + +const layerStyles: CSSProperties = { + position: "fixed", + pointerEvents: "none", + zIndex: 100, + left: 0, + top: 0, + width: "100%", + height: "100%" + }; + +const ItemTypes = { + BOX: 'box', +} + +function getItemStyles(initialOffset, currentOffset, maxX, minX) { + if (!initialOffset || !currentOffset) { + return { + display: "none" + }; + } + let { x, y } = currentOffset; + // if (isSnapToGrid) { + // x -= initialOffset.x; + // y -= initialOffset.y; + // [x, y] = [x, y]; + // x += initialOffset.x; + // y += initialOffset.y; + // } + if (x > maxX) { + x = maxX; + } + + if (x < minX) { + x = minX; + } + const transform = `translate(${x}px, ${initialOffset.y}px)`; + return { + transition: 'transform 0.1s ease-out', + transform, + WebkitTransform: transform + }; +} + +interface Props { + onDrag: (offset: { x: number, y: number } | null) => void; + maxX: number; + minX: number; +} + +const CustomDragLayer: FC = memo(function CustomDragLayer(props) { + const { + itemType, + isDragging, + item, + initialOffset, + currentOffset, + } = useDragLayer((monitor) => ({ + item: monitor.getItem(), + itemType: monitor.getItemType(), + initialOffset: monitor.getInitialSourceClientOffset(), + currentOffset: monitor.getSourceClientOffset(), + isDragging: monitor.isDragging(), + })); + + function renderItem() { + switch (itemType) { + case ItemTypes.BOX: + return ; + default: + return null; + } + } + + if (!isDragging) { + return null; + } + + if (isDragging) { + props.onDrag(currentOffset) + } + + return ( +
+
+ {renderItem()} +
+
+ ); +}) + +export default CustomDragLayer; \ No newline at end of file diff --git a/frontend/app/components/Session_/Player/Controls/DraggableCircle.tsx b/frontend/app/components/Session_/Player/Controls/DraggableCircle.tsx new file mode 100644 index 000000000..f4ebd6abf --- /dev/null +++ b/frontend/app/components/Session_/Player/Controls/DraggableCircle.tsx @@ -0,0 +1,67 @@ +import React, { memo, FC, useEffect, useRef, CSSProperties } from 'react'; +import type { DragSourceMonitor } from 'react-dnd' +import { useDrag } from 'react-dnd' +import { getEmptyImage } from 'react-dnd-html5-backend' +import Circle from './Circle' + +function getStyles( + left: number, + isDragging: boolean, + ): CSSProperties { + // const transform = `translate3d(${(left * 1161) / 100}px, -8px, 0)` + return { + position: 'absolute', + top: '-3px', + left: `${left}%`, + // transform, + // WebkitTransform: transform, + // IE fallback: hide the real node using CSS when dragging + // because IE will ignore our custom "empty image" drag preview. + opacity: isDragging ? 0 : 1, + height: isDragging ? 0 : '', + zIndex: '99', + cursor: 'move' + } +} + +const ItemTypes = { + BOX: 'box', +} + +interface Props { + left: number; + top: number; + onDrop?: (item, monitor) => void; +} + +const DraggableCircle: FC = memo(function DraggableCircle(props) { + const { left, top } = props + const [{ isDragging, item }, dragRef, preview] = useDrag( + () => ({ + type: ItemTypes.BOX, + item: { left, top }, + end: props.onDrop, + collect: (monitor: DragSourceMonitor) => ({ + isDragging: monitor.isDragging(), + item: monitor.getItem(), + }), + }), + [left, top], + ) + + useEffect(() => { + preview(getEmptyImage(), { captureDraggingState: true }) + }, []) + + return ( +
+ +
+ ); +}) + +export default DraggableCircle \ No newline at end of file diff --git a/frontend/app/components/Session_/Player/Controls/TimeTracker.js b/frontend/app/components/Session_/Player/Controls/TimeTracker.js index be91f69fe..e3de669e5 100644 --- a/frontend/app/components/Session_/Player/Controls/TimeTracker.js +++ b/frontend/app/components/Session_/Player/Controls/TimeTracker.js @@ -4,10 +4,6 @@ import styles from './timeTracker.css'; const TimeTracker = ({ time, scale }) => ( -
{ // exception,
@@ -50,7 +53,11 @@ const getPointerIcon = (type) => {
   return 'info';
 }
 
+let debouncedJump = () => null;
 @connectPlayer(state => ({
+  playing: state.playing,
+  time: state.time,
   skipIntervals: state.skipIntervals,
   events: state.eventList,
   skip: state.skip,
@@ -72,6 +79,9 @@
     state.getIn([ 'sessions', 'current', 'returningLocationTime' ]),
 }), { setTimelinePointer })
 export default class Timeline extends React.PureComponent {
+  progressRef = React.createRef()
+  wasPlaying = false
+
   seekProgress = (e) => {
     const { endTime } = this.props;
     const p = e.nativeEvent.offsetX / e.target.offsetWidth;
@@ -86,13 +96,33 @@
   }
 
   componentDidMount() {
-    const { issues, events, fetchList, skipToIssue } = this.props;
+    const { issues, skipToIssue } = this.props;
     const firstIssue = issues.get(0);
+    debouncedJump = debounce(this.props.jump, 500);
+
     if (firstIssue && skipToIssue) {
       this.props.jump(firstIssue.time);
     }
   }
 
+  onDragEnd = (item, monitor) => {
+    if (this.wasPlaying) {
+      this.props.togglePlay();
+    }
+  }
+
+  onDrag = (offset) => {
+    const { endTime } = this.props;
+
+    const p = (offset.x - 60) / this.progressRef.current.offsetWidth;
+    const time = Math.max(Math.round(p * endTime), 0);
+    debouncedJump(time);
+    if (this.props.playing) {
+      this.wasPlaying = true;
+      this.props.pause();
+    }
+  }
+
   render() {
     const {
       events,
@@ -103,20 +133,27 @@
       live,
       logList,
       exceptionsList,
-      resourceList,
+      resourceList,
       clickRageTime,
       stackList,
       fetchList,
-      issues
+      issues,
     } = this.props;
     const scale = 100 / endTime;
+
    return (
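Editor's note on the Timeline hunks above: both `seekProgress` and the new `onDrag` convert a pointer offset inside the progress bar into a player timestamp; `onDrag` additionally subtracts a fixed 60px left gutter before dividing by the bar width. The shared arithmetic, factored into a hedged helper (the name and the standalone form are illustrative, not part of the diff):

    // Convert a pointer x-offset within the progress bar into a timestamp,
    // clamped below by 0 exactly as the diff does.
    function offsetToTime(offsetX: number, barWidth: number, endTime: number): number {
      const p = offsetX / barWidth;                // position as a fraction of the bar
      return Math.max(Math.round(p * endTime), 0); // never seek before the start
    }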
{ !live && } -
+
+ + { skip && skipIntervals.map(interval => (
({ - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), nextId: parseInt(state.getIn([ 'sessions', 'nextId' ])), }))(AutoplayTimer)) diff --git a/frontend/app/components/Session_/Player/Overlay/ElementsMarker/Marker.tsx b/frontend/app/components/Session_/Player/Overlay/ElementsMarker/Marker.tsx index 92568ff49..8149a9278 100644 --- a/frontend/app/components/Session_/Player/Overlay/ElementsMarker/Marker.tsx +++ b/frontend/app/components/Session_/Player/Overlay/ElementsMarker/Marker.tsx @@ -1,3 +1,4 @@ +//@ts-nocheck import React from 'react'; import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen'; import { Tooltip } from 'react-tippy'; diff --git a/frontend/app/components/Session_/Player/Overlay/PlayIconLayer.tsx b/frontend/app/components/Session_/Player/Overlay/PlayIconLayer.tsx index ff23870df..a2a1b9e27 100644 --- a/frontend/app/components/Session_/Player/Overlay/PlayIconLayer.tsx +++ b/frontend/app/components/Session_/Player/Overlay/PlayIconLayer.tsx @@ -1,4 +1,4 @@ -import React, { useState, useCallback } from 'react'; +import React, { useState, useCallback, useEffect } from 'react'; import cn from 'classnames'; import { Icon } from 'UI'; @@ -12,14 +12,28 @@ interface Props { export default function PlayIconLayer({ playing, togglePlay }: Props) { const [ showPlayOverlayIcon, setShowPlayOverlayIcon ] = useState(false); + + useEffect(() => { + // TODO Find a better way to do this + document.addEventListener('keydown', onKeyDown); + + return () => { + document.removeEventListener('keydown', onKeyDown); + } + }, []) + + const onKeyDown = (e) => { + if (e.key === ' ') { + togglePlayAnimated() + } + } + const togglePlayAnimated = useCallback(() => { setShowPlayOverlayIcon(true); togglePlay(); - setTimeout( - () => setShowPlayOverlayIcon(false), - 800, - ); + setTimeout(() => setShowPlayOverlayIcon(false), 800); }, []); + return (
i.key), closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !session.live), } diff --git a/frontend/app/components/Session_/StackEvents/StackEvents.js b/frontend/app/components/Session_/StackEvents/StackEvents.js index 7145bf7fd..79c1e9c72 100644 --- a/frontend/app/components/Session_/StackEvents/StackEvents.js +++ b/frontend/app/components/Session_/StackEvents/StackEvents.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { connectPlayer } from 'Player'; +import { connectPlayer, jump } from 'Player'; import { NoContent, Tabs } from 'UI'; import withEnumToggle from 'HOCs/withEnumToggle'; import { hideHint } from 'Duck/components/player'; @@ -18,7 +18,7 @@ const TABS = [ ALL, ...typeList ].map(tab =>({ text: tab, key: tab })); })) @connect(state => ({ hintIsHidden: state.getIn(['components', 'player', 'hiddenHints', 'stack']) || - !state.getIn([ 'user', 'client', 'sites' ]).some(s => s.stackIntegrations), + !state.getIn([ 'site', 'list' ]).some(s => s.stackIntegrations), }), { hideHint }) @@ -66,7 +66,11 @@ export default class StackEvents extends React.PureComponent { > { filteredStackEvents.map(userEvent => ( - + jump(userEvent.time) } + /> ))} diff --git a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js index 93a901f0a..9c0e66816 100644 --- a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js +++ b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js @@ -1,6 +1,6 @@ import cn from 'classnames'; import { OPENREPLAY, SENTRY, DATADOG, STACKDRIVER } from 'Types/session/stackEvent'; -import { Modal, Icon, SlideModal } from 'UI'; +import { Modal, Icon, SlideModal, IconButton } from 'UI'; import withToggle from 'HOCs/withToggle'; import Sentry from './Sentry'; import JsonViewer from './JsonViewer'; @@ -54,34 +54,42 @@ export default class UserEvent extends React.PureComponent { return !!this.props.userEvent.payload; } + onClickDetails = (e) => { + e.stopPropagation(); + this.props.switchOpen(); + } + renderContent(modalTrigger) { const { userEvent } = this.props; //const message = this.getEventMessage(); return (
-
-
- - { userEvent.name } -
- { /* message && -
- { message } -
*/ - } -
+ // onClick={ this.props.switchOpen } // + onClick={ this.props.onJump } // + className={ + cn( + "group", + stl.userEvent, + this.getLevelClassname(), + { [ stl.modalTrigger ]: modalTrigger } + ) + } + > +
+
+ + { userEvent.name } +
+ { /* message && +
+ { message } +
*/ + } +
+ +
+
); } @@ -91,15 +99,15 @@ export default class UserEvent extends React.PureComponent { if (this.ifNeedModal()) { return ( - - { this.renderContent(true) } - + + { this.renderContent(true) } + // DragSource(name, cardSource, (connect, monitor) => ({ connectDragSource: connect.dragSource(), diff --git a/frontend/app/components/hocs/withSiteIdRouter.js b/frontend/app/components/hocs/withSiteIdRouter.js index 1dd1a5f8b..806d3904e 100644 --- a/frontend/app/components/hocs/withSiteIdRouter.js +++ b/frontend/app/components/hocs/withSiteIdRouter.js @@ -1,13 +1,13 @@ import { withRouter } from 'react-router-dom'; import { connect } from 'react-redux'; import { withSiteId } from 'App/routes'; -import { setSiteId } from 'Duck/user'; +import { setSiteId } from 'Duck/site'; export default BaseComponent => @withRouter @connect((state, props) => ({ urlSiteId: props.match.params.siteId, - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), }), { setSiteId, }) diff --git a/frontend/app/components/hocs/withSiteIdUpdater.js b/frontend/app/components/hocs/withSiteIdUpdater.js index 67a7dbc60..9319474ca 100644 --- a/frontend/app/components/hocs/withSiteIdUpdater.js +++ b/frontend/app/components/hocs/withSiteIdUpdater.js @@ -1,11 +1,11 @@ import { connect } from 'react-redux'; import { withSiteId } from 'App/routes'; -import { setSiteId } from 'Duck/user'; +import { setSiteId } from 'Duck/site'; export default BaseComponent => @connect((state, props) => ({ urlSiteId: props.match.params.siteId, - siteId: state.getIn([ 'user', 'siteId' ]), + siteId: state.getIn([ 'site', 'siteId' ]), }), { setSiteId, }) diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx index 28a0cbc42..89d78ea7a 100644 --- a/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx +++ b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx @@ -98,7 +98,7 @@ function CustomMetricForm(props: Props) { autoFocus={ true } className="text-lg" name="name" - style={{ fontSize: '18px', padding: '10px', fontWeight: '600'}} + style={{ fontSize: '18px', padding: '10px', fontWeight: 600}} value={ metric.name } onChange={ write } placeholder="Metric Title" diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx index 4c8b8b6ee..8f8aca480 100644 --- a/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx +++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx @@ -86,13 +86,15 @@ function FilterSeries(props: Props) {
{emptyMessage}
)}
-
- - - +
+
+ + + +
)} diff --git a/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx index 7da36c6a9..921795f97 100644 --- a/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx +++ b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx @@ -101,7 +101,7 @@ function SessionListModal(props: Props) { { filteredSessions.map(session => ) } diff --git a/frontend/app/components/shared/DropdownPlain/DropdownPlain.css b/frontend/app/components/shared/DropdownPlain/DropdownPlain.css index dd7b9a2a5..11f7b8923 100644 --- a/frontend/app/components/shared/DropdownPlain/DropdownPlain.css +++ b/frontend/app/components/shared/DropdownPlain/DropdownPlain.css @@ -1,6 +1,7 @@ .dropdown { display: flex !important; - padding: 4px; + justify-content: space-between; + padding: 4px 8px; border-radius: 3px; color: $gray-darkest; font-weight: 500; diff --git a/frontend/app/components/shared/DropdownPlain/DropdownPlain.tsx b/frontend/app/components/shared/DropdownPlain/DropdownPlain.tsx index 7d2b4c1da..2aa1930d3 100644 --- a/frontend/app/components/shared/DropdownPlain/DropdownPlain.tsx +++ b/frontend/app/components/shared/DropdownPlain/DropdownPlain.tsx @@ -8,7 +8,7 @@ interface Props { onChange: (e, { name, value }) => void; icon?: string; direction?: string; - value: any; + value?: any; multiple?: boolean; } diff --git a/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js b/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js index daee15cc3..9fa42147c 100644 --- a/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js +++ b/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js @@ -138,7 +138,6 @@ class AttributeValueField extends React.PureComponent { const _showAutoComplete = this.isAutoComplete(filter.type); const _params = _showAutoComplete ? 
this.getParams(filter) : {}; let _optionsEndpoint= '/events/search'; - console.log('value', filter.value) return ( diff --git a/frontend/app/components/shared/EventFilter/EventEditor.js b/frontend/app/components/shared/EventFilter/EventEditor.js index f2d5dee8f..4fe2c5ee8 100644 --- a/frontend/app/components/shared/EventFilter/EventEditor.js +++ b/frontend/app/components/shared/EventFilter/EventEditor.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { DNDSource, DNDTarget } from 'Components/hocs/dnd'; +// import { DNDSource, DNDTarget } from 'Components/hocs/dnd'; import Event, { TYPES } from 'Types/filter/event'; import { operatorOptions } from 'Types/filter'; import { editEvent, removeEvent, clearEvents, applyFilter } from 'Duck/funnelFilters'; @@ -24,8 +24,8 @@ const getLabel = ({ type }) => { return getPlaceholder({ type }); }; -@DNDTarget('event') -@DNDSource('event') +// @DNDTarget('event') +// @DNDSource('event') @connect(state => ({ isLastEvent: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 1, funnel: state.getIn(['funnels', 'instance']), diff --git a/frontend/app/components/shared/EventFilter/EventFilter.js b/frontend/app/components/shared/EventFilter/EventFilter.js index 9a852398e..ca3db8fc5 100644 --- a/frontend/app/components/shared/EventFilter/EventFilter.js +++ b/frontend/app/components/shared/EventFilter/EventFilter.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { DNDContext } from 'Components/hocs/dnd'; +// import { DNDContext } from 'Components/hocs/dnd'; import { addEvent, applyFilter, moveEvent, clearEvents, addCustomFilter, addAttribute, setSearchQuery, setActiveFlow, setFilterOption @@ -39,7 +39,7 @@ import CustomFilters from './CustomFilters'; updateFunnelFilters, refreshFunnel }) -@DNDContext +// @DNDContext export default class EventFilter extends React.PureComponent { state = { search: '', showFilterModal: false, showPlacehoder: true, showSaveModal: false } fetchEventList = debounce(this.props.fetchEventList, 500) diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx index 62ec51c9d..a97f3573c 100644 --- a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx +++ b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx @@ -60,7 +60,7 @@ function FilterAutoComplete(props: Props) { .finally(() => setLoading(false)); } - const debouncedRequestValues = React.useCallback(debounce(requestValues, 1000), []); + const debouncedRequestValues = React.useCallback(debounce(requestValues, 1000), [params]); const onInputChange = ({ target: { value } }) => { setQuery(value); diff --git a/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx index 542dfce1c..2030d422a 100644 --- a/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx +++ b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx @@ -1,9 +1,6 @@ import React, { useState, useEffect } from 'react'; -import { Icon, Loader } from 'UI'; -// import { debounce } from 'App/utils'; +import { Icon } from 'UI'; import stl from './FilterAutoCompleteLocal.css'; -// import cn from 'classnames'; - interface Props { showOrButton?: boolean; showCloseButton?: boolean; @@ -15,6 +12,7 @@ interface Props { icon?: 
string;
  type?: string;
  isMultilple?: boolean;
+  allowDecimals?: boolean;
 }
 
 function FilterAutoCompleteLocal(props: Props) {
@@ -28,15 +26,24 @@
     icon = null,
     type = "text",
     isMultilple = true,
+    allowDecimals = true,
   } = props;
 
   const [showModal, setShowModal] = useState(true)
   const [query, setQuery] = useState(value);
 
-  // const debounceOnSelect = debounce(props.onSelect, 500);
-  const onInputChange = ({ target: { value } }) => {
-    setQuery(value);
-    props.onSelect(null, { value });
-  }
+  const onInputChange = (e) => {
+    if (allowDecimals) {
+      const value = e.target.value;
+      setQuery(value);
+      props.onSelect(null, { value });
+    } else {
+      const value = e.target.value.replace(/[^\d]/g, "");
+      if (+value !== 0) {
+        setQuery(value);
+        props.onSelect(null, { value });
+      }
+    }
+  };
 
   useEffect(() => {
     setQuery(value);
@@ -58,7 +65,7 @@
setShowModal(true)} value={ query } diff --git a/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx b/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx index 6a3829699..579a74231 100644 --- a/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx +++ b/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx @@ -4,7 +4,7 @@ import FilterSelection from '../FilterSelection'; import FilterValue from '../FilterValue'; import { Icon } from 'UI'; import FilterSource from '../FilterSource'; -import { FilterType } from 'App/types/filter/filterType'; +import { FilterKey, FilterType } from 'App/types/filter/filterType'; import SubFilterItem from '../SubFilterItem'; interface Props { @@ -13,9 +13,10 @@ interface Props { onUpdate: (filter) => void; onRemoveFilter: () => void; isFilter?: boolean; + saveRequestPayloads?: boolean; } function FilterItem(props: Props) { - const { isFilter = false, filterIndex, filter } = props; + const { isFilter = false, filterIndex, filter, saveRequestPayloads } = props; const canShowValues = !(filter.operator === "isAny" || filter.operator === "onAny" || filter.operator === "isUndefined"); const isSubFilter = filter.type === FilterType.SUB_FILTERS; @@ -83,7 +84,7 @@ function FilterItem(props: Props) { {/* filters */} {isSubFilter && (
- {filter.filters.map((subFilter, subFilterIndex) => ( + {filter.filters.filter(i => (i.key !== FilterKey.FETCH_REQUEST_BODY && i.key !== FilterKey.FETCH_RESPONSE_BODY) || saveRequestPayloads).map((subFilter, subFilterIndex) => ( void; onChangeEventsOrder: (e, { name, value }) => void; hideEventsOrder?: boolean; + observeChanges?: () => void; + saveRequestPayloads?: boolean; } function FilterList(props: Props) { - const { filter, hideEventsOrder = false } = props; - const filters = filter.filters; - const hasEvents = filter.filters.filter(i => i.isEvent).size > 0; - const hasFilters = filter.filters.filter(i => !i.isEvent).size > 0; + const { observeChanges = () => {}, filter, hideEventsOrder = false, saveRequestPayloads } = props; + const filters = List(filter.filters); + const hasEvents = filters.filter((i: any) => i.isEvent).size > 0; + const hasFilters = filters.filter((i: any) => !i.isEvent).size > 0; let rowIndex = 0; + useEffect(observeChanges, [filters]); + const onRemoveFilter = (filterIndex) => { props.onRemoveFilter(filterIndex); } - return ( + return useObserver(() => (
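Editor's note on the FilterItem change above: the inline predicate reads more easily as a named function, with the rule being that fetch request/response body sub-filters are only offered when the project saves request payloads. A hedged restatement (the standalone helper is illustrative, not in the diff):

    import { FilterKey } from 'App/types/filter/filterType';

    // A sub-filter is visible when payload capture is on, or when it is
    // not one of the two payload-dependent keys.
    const isVisibleSubFilter = (f: { key: string }, saveRequestPayloads?: boolean) =>
      !!saveRequestPayloads ||
      (f.key !== FilterKey.FETCH_REQUEST_BODY && f.key !== FilterKey.FETCH_RESPONSE_BODY);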
{ hasEvents && ( <> @@ -54,13 +60,14 @@ function FilterList(props: Props) {
)}
- {filters.map((filter, filterIndex) => filter.isEvent ? ( + {filters.map((filter: any, filterIndex: any) => filter.isEvent ? ( props.onUpdateFilter(filterIndex, filter)} onRemoveFilter={() => onRemoveFilter(filterIndex) } + saveRequestPayloads={saveRequestPayloads} /> ): null)}
@@ -71,7 +78,7 @@ function FilterList(props: Props) { <> {hasEvents &&
}
FILTERS
- {filters.map((filter, filterIndex) => !filter.isEvent ? ( + {filters.map((filter: any, filterIndex: any) => !filter.isEvent ? ( )}
- ); + )); } export default FilterList; \ No newline at end of file diff --git a/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx index cb9c6f768..04688de6b 100644 --- a/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx +++ b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx @@ -73,7 +73,7 @@ function FilterModal(props: Props) {
diff --git a/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx
index cb9c6f768..04688de6b 100644
--- a/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx
+++ b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx
@@ -73,7 +73,7 @@ function FilterModal(props: Props) {
             {key}
             {filters[key].map((filter: any) => (
-              onFilterClick(filter)}>
+              onFilterClick({ ...filter, value: [''] })}>
                 {filter.label}
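Spreading the clicked filter with value: [''] gives every newly selected filter a single empty value slot, so the value input opens blank instead of inheriting whatever the shared filter definition last held. In isolation (someFilter is an illustrative stand-in):

// On selection the filter is cloned with one empty value entry.
declare const someFilter: { key: string; value: string[] };
const freshFilter = { ...someFilter, value: [''] };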
diff --git a/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx
index ba7b8650e..4e8d4001f 100644
--- a/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx
+++ b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx
@@ -12,7 +12,7 @@ interface Props {
 }
 function FilterValue(props: Props) {
   const { filter } = props;
-  const [durationValues, setDurationValues] = useState({ minDuration: filter.value[0], maxDuration: filter.value[1] });
+  const [durationValues, setDurationValues] = useState({ minDuration: filter.value[0], maxDuration: filter.value.length > 1 ? filter.value[1] : filter.value[0] });
   const showCloseButton = filter.value.length > 1;
   const lastIndex = filter.value.length - 1;
@@ -137,6 +137,7 @@ function FilterValue(props: Props) {
         onSelect={(e, item) => debounceOnSelect(e, item, valueIndex)}
         icon={filter.icon}
         type="number"
+        allowDecimals={false}
         isMultilple={false}
       />
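The durationValues change guards against duration filters that arrive with a single value: the old code read filter.value[1] unconditionally and got undefined for maxDuration. The guard, extracted as a sketch (toDurationBounds is an illustrative name):

// Fall back to the only element rather than indexing past the array end.
const toDurationBounds = (value: number[]) => ({
  minDuration: value[0],
  maxDuration: value.length > 1 ? value[1] : value[0],
});

// toDurationBounds([500]) -> { minDuration: 500, maxDuration: 500 }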
   // props.updateCurrentPage(props.currentPage + 1)

-  useEffect(() => {
-    if (filters.size === 0) {
-      props.addFilterByKeyAndValue(FilterKey.USERID, '');
-    }
-  }, []);
+  // useEffect(() => {
+  //   if (filters.size === 0) {
+  //     props.addFilterByKeyAndValue(FilterKey.USERID, '');
+  //   }
+  // }, []);

   useEffect(() => {
     if (metaList.size === 0 || !!sort.field) return;
diff --git a/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx b/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx
index ae7a60b28..cc3fef487 100644
--- a/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx
+++ b/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx
@@ -4,9 +4,9 @@ import { connect } from 'react-redux';
 import { edit, addFilter, addFilterByKeyAndValue } from 'Duck/liveSearch';
 import FilterSelection from 'Shared/Filters/FilterSelection';
 import { IconButton } from 'UI';
-import { FilterKey } from 'App/types/filter/filterType';

 interface Props {
+  list: any,
   appliedFilter: any;
   edit: typeof edit;
   addFilter: typeof addFilter;
@@ -42,9 +42,9 @@ function LiveSessionSearch(props: Props) {
     });
     props.edit({ filters: newFilters, });
-    if (newFilters.size === 0) {
-      props.addFilterByKeyAndValue(FilterKey.USERID, '');
-    }
+    // if (newFilters.size === 0) {
+    //   props.addFilterByKeyAndValue(FilterKey.USERID, '');
+    // }
   }

   const onChangeEventsOrder = (e, { name, value }) => {
@@ -53,16 +53,18 @@ function LiveSessionSearch(props: Props) {
     });
   }

-  return (hasEvents || hasFilters) ? (
+  return props.list.size > 0 ? (
-
-
-
+      { (hasEvents || hasFilters) && (
+
+
+      )}
@@ -80,4 +82,5 @@ function LiveSessionSearch(props: Props) {
 }

 export default connect(state => ({
   appliedFilter: state.getIn([ 'liveSearch', 'instance' ]),
+  list: state.getIn(['sessions', 'liveSessions']),
 }), { edit, addFilter, addFilterByKeyAndValue })(LiveSessionSearch);
\ No newline at end of file
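Two render gates are in play here: the whole live-search bar now shows only when live sessions exist (props.list.size > 0), and the filter block inside it only when events or filters are applied. The parentheses around hasEvents || hasFilters matter, and the hunk above is adjusted to include them. A sketch of the gating; LiveSearchGate and the class names are illustrative stand-ins:

import React from 'react';

function LiveSearchGate({ list, hasEvents, hasFilters }: { list: { size: number }; hasEvents: boolean; hasFilters: boolean }) {
  return list.size > 0 ? (
    <div className="search-bar">
      {/* && binds tighter than ||: without the parentheses this reads as
          hasEvents || (hasFilters && <div/>), which evaluates to the bare
          boolean true whenever hasEvents is set, and React renders nothing. */}
      {(hasEvents || hasFilters) && <div className="filter-list" />}
    </div>
  ) : null;
}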
diff --git a/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js b/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js
index cee55088b..bee2810d8 100644
--- a/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js
+++ b/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js
@@ -35,6 +35,6 @@ const NoSessionsMessage= (props) => {
 }

 export default connect(state => ({
-  site: state.getIn([ 'site', 'instance' ]),
+  site: state.getIn([ 'site', 'siteId' ]),
   sites: state.getIn([ 'site', 'list' ])
 }))(withRouter(NoSessionsMessage))
\ No newline at end of file
diff --git a/frontend/app/components/shared/ResultTimings/ResultTimings.js b/frontend/app/components/shared/ResultTimings/ResultTimings.js
index 2cd82ee1e..120233074 100644
--- a/frontend/app/components/shared/ResultTimings/ResultTimings.js
+++ b/frontend/app/components/shared/ResultTimings/ResultTimings.js
@@ -24,7 +24,7 @@ function ResultTimings({ duration, timing }) {
   return (
diff --git a/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx
index 1ba6b3d56..668b657b0 100644
--- a/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx
+++ b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx
@@ -38,7 +38,7 @@ function SaveSearchModal(props: Props) {
   const onDelete = async () => {
     if (await confirm({
       header: 'Confirm',
-      confirmButton: 'Yes, Delete',
+      confirmButton: 'Yes, delete',
       confirmation: `Are you sure you want to permanently delete this Saved search?`,
     })) {
       props.remove(savedSearch.searchId).then(() => {
diff --git a/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx
index a141dbb5b..61f566680 100644
--- a/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx
+++ b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx
@@ -42,7 +42,7 @@ function SavedSearchDropdown(props: Props) {
   const onDelete = async (instance) => {
     if (await confirm({
       header: 'Confirm',
-      confirmButton: 'Yes, Delete',
+      confirmButton: 'Yes, delete',
       confirmation: `Are you sure you want to permanently delete this search?`
     })) {
       props.remove(instance.alertId).then(() => {
diff --git a/frontend/app/components/shared/SessionItem/SessionItem.js b/frontend/app/components/shared/SessionItem/SessionItem.js
index 64e4199ba..35434cf76 100644
--- a/frontend/app/components/shared/SessionItem/SessionItem.js
+++ b/frontend/app/components/shared/SessionItem/SessionItem.js
@@ -28,7 +28,7 @@ const SESSIONS_ROUTE = sessionsRoute();
 // )
 @connect(state => ({
   timezone: state.getIn(['sessions', 'timezone']),
-  siteId: state.getIn([ 'user', 'siteId' ]),
+  siteId: state.getIn([ 'site', 'siteId' ]),
 }), { toggleFavorite, setSessionPath })
 @withRouter
 export default class SessionItem extends React.PureComponent {
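NoSessionsMessage and SessionItem previously read the active site from stale branches of the Redux tree ('site.instance' and 'user.siteId'); after this change every consumer resolves it from the same path. With the store's Immutable.js root map, the lookup is:

import { Map } from 'immutable';

// Single source of truth for the active project/site id.
declare const state: Map<string, any>;
const siteId = state.getIn(['site', 'siteId']);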
diff --git a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx
index 17904c1ba..8520af60e 100644
--- a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx
+++ b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx
@@ -11,9 +11,10 @@ interface Props {
   appliedFilter: any;
   edit: typeof edit;
   addFilter: typeof addFilter;
+  saveRequestPayloads: boolean;
 }
 function SessionSearch(props: Props) {
-  const { appliedFilter } = props;
+  const { appliedFilter, saveRequestPayloads = false } = props;
   const hasEvents = appliedFilter.filters.filter(i => i.isEvent).size > 0;
   const hasFilters = appliedFilter.filters.filter(i => !i.isEvent).size > 0;
@@ -60,6 +61,7 @@ function SessionSearch(props: Props) {
         onUpdateFilter={onUpdateFilter}
         onRemoveFilter={onRemoveFilter}
         onChangeEventsOrder={onChangeEventsOrder}
+        saveRequestPayloads={saveRequestPayloads}
       />
@@ -82,5 +84,6 @@ function SessionSearch(props: Props) {
 }

 export default connect(state => ({
+  saveRequestPayloads: state.getIn(['site', 'active', 'saveRequestPayloads']),
   appliedFilter: state.getIn([ 'search', 'instance' ]),
 }), { edit, addFilter })(SessionSearch);
\ No newline at end of file
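SessionSearch sources saveRequestPayloads from the active site record and threads it down as a plain prop through FilterList to FilterItem (see the predicate sketch after the FilterItem hunk), so the lower components need no store access of their own. The connect mapping, reduced to the relevant keys (a sketch; state is assumed to be the Immutable.js root store):

const mapStateToProps = (state: any) => ({
  // The flag lives on the active site record.
  saveRequestPayloads: state.getIn(['site', 'active', 'saveRequestPayloads']),
  appliedFilter: state.getIn(['search', 'instance']),
});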
diff --git a/frontend/app/components/shared/SiteDropdown/SiteDropdown.js b/frontend/app/components/shared/SiteDropdown/SiteDropdown.js
index b83178512..5fc482389 100644
--- a/frontend/app/components/shared/SiteDropdown/SiteDropdown.js
+++ b/frontend/app/components/shared/SiteDropdown/SiteDropdown.js
@@ -5,7 +5,7 @@ const SiteDropdown = ({ contextName="", sites, onChange, value }) => {
   const options = sites.map(site => ({ value: site.id, text: site.host })).toJS();
   return (