diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml
index eb193dbb7..9fe8c5611 100644
--- a/.github/workflows/api.yaml
+++ b/.github/workflows/api.yaml
@@ -1,5 +1,6 @@
# This action will push the chalice changes to aws
on:
+ workflow_dispatch:
push:
branches:
- api-v1.5.5
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index 8c5038f5f..990ce3c8a 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -1,5 +1,6 @@
name: Frontend FOSS Deployment
on:
+ workflow_dispatch:
push:
branches:
- dev
diff --git a/README.md b/README.md
index b943773e6..870d47fcc 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ For those who want to simply use OpenReplay as a service, [sign up](https://app.
Please refer to the [official OpenReplay documentation](https://docs.openreplay.com/). That should help you troubleshoot common issues. For additional help, you can reach out to us on one of these channels:
-- [Slack](https://slack.openreplay.com) (Connect with our engineers and community)
+- [Discord](https://discord.openreplay.com) (Connect with our engineers and community)
- [GitHub](https://github.com/openreplay/openreplay/issues) (Bug and issue reports)
- [Twitter](https://twitter.com/OpenReplayHQ) (Product updates, Great content)
- [Website chat](https://openreplay.com) (Talk to us)
@@ -80,7 +80,7 @@ We're always on the lookout for contributions to OpenReplay, and we're glad you'
See our [Contributing Guide](CONTRIBUTING.md) for more details.
-Also, feel free to join our [Slack](https://slack.openreplay.com) to ask questions, discuss ideas or connect with our contributors.
+Also, feel free to join our [Discord](https://discord.openreplay.com) to ask questions, discuss ideas or connect with our contributors.
## Roadmap
diff --git a/api/.env.default b/api/.env.default
index 6ae959a7d..0d4f8130c 100644
--- a/api/.env.default
+++ b/api/.env.default
@@ -37,6 +37,8 @@ pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=45
+PG_RETRY_MAX=50
+PG_RETRY_INTERVAL=2
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
diff --git a/api/Dockerfile b/api/Dockerfile
index 780518ff3..4526c32bd 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -5,6 +5,7 @@ WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env
+ENV APP_NAME=chalice
# Add Tini
# Startup daemon
diff --git a/api/Dockerfile.alerts b/api/Dockerfile.alerts
index ed8f06eac..bdd1772ba 100644
--- a/api/Dockerfile.alerts
+++ b/api/Dockerfile.alerts
@@ -6,6 +6,7 @@ COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
ENV pg_minconn 2
+ENV APP_NAME=alerts
# Add Tini
# Startup daemon
diff --git a/api/app.py b/api/app.py
index d261dadac..959f1ef8f 100644
--- a/api/app.py
+++ b/api/app.py
@@ -9,12 +9,11 @@ from starlette.responses import StreamingResponse
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic
-from routers.app import v1_api
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
-from routers.subs import dashboard
+from routers.subs import dashboard, insights, metrics, v1_api
-app = FastAPI()
+app = FastAPI(root_path="/api")
@app.middleware('http')
@@ -54,7 +53,8 @@ app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(dashboard.app)
-# app.include_router(insights.app)
+app.include_router(metrics.app)
+app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
Schedule = AsyncIOScheduler()
diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py
index 33a859cc8..899fd046f 100644
--- a/api/chalicelib/core/authorizers.py
+++ b/api/chalicelib/core/authorizers.py
@@ -13,9 +13,9 @@ def jwt_authorizer(token):
try:
payload = jwt.decode(
token[1],
- config("jwt_secret"),
+            config("jwt_secret"),
algorithms=config("jwt_algorithm"),
- audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
+            audience=["front:default-foss"]
)
except jwt.ExpiredSignatureError:
print("! JWT Expired signature")
diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py
index e0b0ed432..15c2ffc49 100644
--- a/api/chalicelib/core/custom_metrics.py
+++ b/api/chalicelib/core/custom_metrics.py
@@ -9,11 +9,11 @@ from chalicelib.utils.TimeUTC import TimeUTC
PIE_CHART_GROUP = 5
-def __try_live(project_id, data: schemas.CreateCustomMetricsSchema):
+def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
results = []
for i, s in enumerate(data.series):
- s.filter.startDate = data.startDate
- s.filter.endDate = data.endDate
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value))
@@ -42,7 +42,7 @@ def __try_live(project_id, data: schemas.CreateCustomMetricsSchema):
return results
-def merged_live(project_id, data: schemas.CreateCustomMetricsSchema):
+def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
series_charts = __try_live(project_id=project_id, data=data)
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
@@ -54,13 +54,9 @@ def merged_live(project_id, data: schemas.CreateCustomMetricsSchema):
return results
-def __get_merged_metric(project_id, user_id, metric_id,
- data: Union[schemas.CustomMetricChartPayloadSchema,
- schemas.CustomMetricSessionsPayloadSchema]) \
+def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
+ schemas.CustomMetricSessionsPayloadSchema]) \
-> Union[schemas.CreateCustomMetricsSchema, None]:
- metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
- if metric is None:
- return None
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
if len(data.filters) > 0 or len(data.events) > 0:
for s in metric.series:
@@ -71,11 +67,12 @@ def __get_merged_metric(project_id, user_id, metric_id,
return metric
-def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
- metric: schemas.CreateCustomMetricsSchema = __get_merged_metric(project_id=project_id, user_id=user_id,
- metric_id=metric_id, data=data)
+def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
+ if metric is None:
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
series_charts = __try_live(project_id=project_id, data=metric)
if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
return series_charts
@@ -88,21 +85,23 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
- metric: schemas.CreateCustomMetricsSchema = __get_merged_metric(project_id=project_id, user_id=user_id,
- metric_id=metric_id, data=data)
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
if metric is None:
return None
results = []
for s in metric.series:
- s.filter.startDate = data.startDate
- s.filter.endDate = data.endDate
+ s.filter.startDate = data.startTimestamp
+ s.filter.endDate = data.endTimestamp
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})
return results
-def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
+def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
_data = {}
for i, s in enumerate(data.series):
@@ -129,6 +128,8 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
query
)
r = cur.fetchone()
+ if dashboard:
+ return r["metric_id"]
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
@@ -147,10 +148,11 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
"metric_value": data.metric_value, "metric_format": data.metric_format}
for i, s in enumerate(data.series):
prefix = "u_"
+ if s.index is None:
+ s.index = i
if s.series_id is None or s.series_id not in series_ids:
n_series.append({"i": i, "s": s})
prefix = "n_"
- s.index = i
else:
u_series.append({"i": i, "s": s})
u_series_ids.append(s.series_id)
@@ -192,40 +194,60 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
SET name = %(name)s, is_public= %(is_public)s,
view_type= %(view_type)s, metric_type= %(metric_type)s,
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
- metric_format= %(metric_format)s
+ metric_format= %(metric_format)s,
+ edited_at = timezone('utc'::text, now())
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
- cur.execute(
- query
- )
+ cur.execute(query)
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
-def get_all(project_id, user_id):
+def get_all(project_id, user_id, include_series=False):
with pg_client.PostgresClient() as cur:
- cur.execute(
- cur.mogrify(
- """SELECT *
- FROM metrics
- LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
+ sub_join = ""
+ if include_series:
+ sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
- ) AS metric_series ON (TRUE)
+ ) AS metric_series ON (TRUE)"""
+ cur.execute(
+ cur.mogrify(
+ f"""SELECT *
+ FROM metrics
+ {sub_join}
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT DISTINCT dashboard_id, name, is_public
+ FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
+ WHERE deleted_at ISNULL
+ AND dashboard_widgets.metric_id = metrics.metric_id
+ AND project_id = %(project_id)s
+ AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)
+ LEFT JOIN LATERAL (SELECT email AS owner_email
+ FROM users
+ WHERE deleted_at ISNULL
+ AND users.user_id = metrics.user_id
+ ) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
- AND (user_id = %(user_id)s OR is_public)
- ORDER BY created_at;""",
+ AND (user_id = %(user_id)s OR metrics.is_public)
+ ORDER BY metrics.edited_at, metrics.created_at;""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
- for r in rows:
- r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
- for s in r["series"]:
- s["filter"] = helper.old_search_payload_to_flat(s["filter"])
+ if include_series:
+ for r in rows:
+ # r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+ for s in r["series"]:
+ s["filter"] = helper.old_search_payload_to_flat(s["filter"])
+ else:
+ for r in rows:
+ r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
+ r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
rows = helper.list_to_camel_case(rows)
return rows
@@ -235,7 +257,7 @@ def delete(project_id, metric_id, user_id):
cur.execute(
cur.mogrify("""\
UPDATE public.metrics
- SET deleted_at = timezone('utc'::text, now())
+ SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND metric_id = %(metric_id)s
AND (user_id = %(user_id)s OR is_public);""",
@@ -256,6 +278,18 @@ def get(metric_id, project_id, user_id, flatten=True):
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT dashboard_id, name, is_public
+ FROM dashboards
+ WHERE deleted_at ISNULL
+ AND project_id = %(project_id)s
+ AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)
+ LEFT JOIN LATERAL (SELECT email AS owner_email
+ FROM users
+ WHERE deleted_at ISNULL
+ AND users.user_id = metrics.user_id
+ ) AS owner ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
@@ -268,12 +302,46 @@ def get(metric_id, project_id, user_id, flatten=True):
if row is None:
return None
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
+ row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
if flatten:
for s in row["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
return helper.dict_to_camel_case(row)
+def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
+ with pg_client.PostgresClient() as cur:
+ sub_query = ""
+ if include_dashboard:
+ sub_query = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
+ FROM (SELECT dashboard_id, name, is_public
+ FROM dashboards
+ WHERE deleted_at ISNULL
+ AND project_id = %(project_id)s
+ AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
+ ) AS connected_dashboards ON (TRUE)"""
+ cur.execute(
+ cur.mogrify(
+ f"""SELECT *
+ FROM metrics
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ {sub_query}
+ WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL)
+ AND metrics.deleted_at ISNULL
+ AND (metrics.user_id = %(user_id)s OR metrics.is_public)
+ AND metrics.metric_id = %(metric_id)s
+ ORDER BY created_at;""",
+ {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
+ )
+ )
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py
index 9cd88eb6a..362e4c491 100644
--- a/api/chalicelib/core/dashboard.py
+++ b/api/chalicelib/core/dashboard.py
@@ -127,7 +127,6 @@ SESSIONS_META_FIELDS = {"revId": "rev_id",
"browser": "user_browser"}
-@dev.timed
def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -138,7 +137,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with pg_client.PostgresClient() as cur:
pg_query = f"""\
SELECT generated_timestamp AS timestamp,
- COALESCE(COUNT(sessions), 0) AS count
+ COALESCE(COUNT(sessions), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT 1
FROM public.sessions
@@ -151,7 +150,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
results = {
- "count": sum([r["count"] for r in rows]),
+ "value": sum([r["value"] for r in rows]),
"chart": rows
}
@@ -170,12 +169,11 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
count = cur.fetchone()["count"]
- results["countProgress"] = helper.__progress(old_val=count, new_val=results["count"])
-
+ results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
+ results["unit"] = schemas.TemplatePredefinedUnits.count
return results
-@dev.timed
def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -234,7 +232,6 @@ def __count_distinct_errors(cur, project_id, startTimestamp, endTimestamp, pg_su
return cur.fetchone()["count"]
-@dev.timed
def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -298,7 +295,6 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return rows
-@dev.timed
def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
@@ -316,7 +312,6 @@ def get_page_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return results
-@dev.timed
def __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("pages.timestamp>=%(startTimestamp)s")
@@ -336,7 +331,6 @@ def __get_page_metrics(cur, project_id, startTimestamp, endTimestamp, **args):
return rows
-@dev.timed
def get_application_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
@@ -390,7 +384,6 @@ def __get_application_activity(cur, project_id, startTimestamp, endTimestamp, **
return result
-@dev.timed
def get_user_activity(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
with pg_client.PostgresClient() as cur:
@@ -423,7 +416,6 @@ def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args):
return row
-@dev.timed
def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -468,8 +460,9 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ORDER BY generated_timestamp) AS chart
) AS chart ON (TRUE);"""
- cur.execute(cur.mogrify(pg_query, {"step_size": step_size,"project_id": project_id, "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
+ cur.execute(
+ cur.mogrify(pg_query, {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
rows = cur.fetchall()
for i in range(len(rows)):
rows[i]["sessions"] = rows[i].pop("sessions_count")
@@ -478,7 +471,6 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return sorted(rows, key=lambda k: k["sessions"], reverse=True)
-@dev.timed
def __get_performance_constraint(l):
if len(l) == 0:
return ""
@@ -486,7 +478,6 @@ def __get_performance_constraint(l):
return f"AND ({' OR '.join(l)})"
-@dev.timed
def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
density=19, resources=None, **args):
step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
@@ -621,7 +612,6 @@ def __get_resource_db_type_from_type(resource_type):
return {v: k for k, v in RESOURCS_TYPE_TO_DB_TYPE.items()}.get(resource_type, resource_type)
-@dev.timed
def search(text, resource_type, project_id, performance=False, pages_only=False, events_only=False,
metadata=False, key=None, platform=None):
if not resource_type:
@@ -672,8 +662,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)} AND positionUTF8(url_path, %(value)s) != 0
LIMIT 10);"""
print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text.lower()),
- "platform_0": platform}))
+ "value": helper.string_to_sql_like(text.lower()),
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text.lower()),
"platform_0": platform}))
@@ -691,9 +681,9 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text),
- "resource_type": resource_type,
- "platform_0": platform}))
+ "value": helper.string_to_sql_like(text),
+ "resource_type": resource_type,
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"resource_type": resource_type,
@@ -709,8 +699,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text),
- "platform_0": platform}))
+ "value": helper.string_to_sql_like(text),
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -723,8 +713,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text),
- "platform_0": platform}))
+ "value": helper.string_to_sql_like(text),
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -737,8 +727,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text),
- "platform_0": platform}))
+ "value": helper.string_to_sql_like(text),
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
@@ -758,8 +748,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
print(cur.mogrify(pg_query,
- {"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key,
- "platform_0": platform}))
+ {"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key,
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text), "key": key,
"platform_0": platform}))
@@ -785,9 +775,9 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
LIMIT 10)""")
pg_query = " UNION ALL ".join(pg_query)
print(cur.mogrify(pg_query,
- {"project_id": project_id, "value": helper.string_to_sql_like(text),
- "key": key,
- "platform_0": platform}))
+ {"project_id": project_id, "value": helper.string_to_sql_like(text),
+ "key": key,
+ "platform_0": platform}))
cur.execute(cur.mogrify(pg_query,
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"key": key,
@@ -798,7 +788,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
return [helper.dict_to_camel_case(row) for row in rows]
-@dev.timed
def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -854,7 +843,6 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
return rows
-@dev.timed
def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -866,8 +854,6 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_sub_query_subset.append("resources.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("resources.timestamp<%(endTimestamp)s")
-
-
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (SELECT resources.session_id,
resources.url_hostpath,
@@ -922,7 +908,6 @@ def dashboard_args(params):
return args
-@dev.timed
def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=19, type=None, url=None, **args):
@@ -971,7 +956,6 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
return {"avg": avg, "chart": rows}
-@dev.timed
def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=19, url=None, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -993,13 +977,13 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM public.sessions
INNER JOIN events.pages USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)})
- SELECT COALESCE(avg, 0) AS avg, chart
+ SELECT COALESCE(avg, 0) AS value, chart
FROM (SELECT AVG(dom_building_time) FROM pages) AS avg
LEFT JOIN
(SELECT jsonb_agg(chart) AS chart
FROM (
SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(dom_building_time), 0) AS avg
+ COALESCE(AVG(dom_building_time), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT pages.dom_building_time
FROM pages
@@ -1014,10 +998,10 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
+ row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return row
-@dev.timed
def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), type="all", density=19, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1091,7 +1075,6 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return rows
-@dev.timed
def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1110,7 +1093,6 @@ def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return {"count": sum(i["count"] for i in rows), "chart": helper.list_to_camel_case(rows)}
-@dev.timed
def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1139,7 +1121,6 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
return {"avg": avg, "chart": helper.list_to_camel_case(rows)}
-@dev.timed
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, url=None, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1155,7 +1136,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
pg_sub_query_chart.append(f"url = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(pages.response_time),0) AS avg
+ COALESCE(AVG(pages.response_time),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT response_time
@@ -1176,10 +1157,9 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
- return {"avg": avg, "chart": rows}
+ return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
-@dev.timed
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=20, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1297,7 +1277,6 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
return result
-@dev.timed
def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1317,7 +1296,6 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1
return rows
-@dev.timed
def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1368,7 +1346,6 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return helper.dict_to_camel_case(row)
-@dev.timed
def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, url=None, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1384,11 +1361,11 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_query = f"""WITH pages AS(SELECT pages.visually_complete,pages.timestamp
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)})
- SELECT COALESCE((SELECT AVG(pages.visually_complete) FROM pages),0) AS avg,
+ SELECT COALESCE((SELECT AVG(pages.visually_complete) FROM pages),0) AS value,
jsonb_agg(chart) AS chart
FROM
(SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(visually_complete), 0) AS avg
+ COALESCE(AVG(visually_complete), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT pages.visually_complete
FROM pages
@@ -1402,10 +1379,10 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
+ row["unit"] = schemas.TemplatePredefinedUnits.millisecond
return row
-@dev.timed
def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), value=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1444,7 +1421,6 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d
return rows
-@dev.timed
def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1454,7 +1430,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(performance.avg_used_js_heap_size),0) AS avg_used_js_heap_size
+ COALESCE(AVG(performance.avg_used_js_heap_size),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT avg_used_js_heap_size
@@ -1474,10 +1450,9 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
- return {"avgUsedJsHeapSize": avg, "chart": helper.list_to_camel_case(rows)}
+ return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.memory}
-@dev.timed
def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1487,7 +1462,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(performance.avg_cpu),0) AS avg_cpu
+ COALESCE(AVG(performance.avg_cpu),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT avg_cpu
@@ -1507,10 +1482,10 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
- return {"avgCpu": avg, "chart": helper.list_to_camel_case(rows)}
+ return {"value": avg, "chart": helper.list_to_camel_case(rows),
+ "unit": schemas.TemplatePredefinedUnits.percentage}
-@dev.timed
def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1520,7 +1495,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT generated_timestamp AS timestamp,
- COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS avg_fps
+ COALESCE(AVG(NULLIF(performance.avg_fps,0)),0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
SELECT avg_fps
@@ -1540,10 +1515,9 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
- return {"avgFps": avg, "chart": helper.list_to_camel_case(rows)}
+ return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.frame}
-@dev.timed
def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1628,7 +1602,6 @@ def __merge_rows_with_neutral(rows, neutral):
return rows
-@dev.timed
def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1679,7 +1652,6 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return result
-@dev.timed
def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1720,7 +1692,6 @@ def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return rows
-@dev.timed
def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1769,7 +1740,6 @@ def __nested_array_to_dict_array(rows, key="url_host", value="count"):
return rows
-@dev.timed
def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1801,7 +1771,6 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return {"avg": avg, "partition": rows}
-@dev.timed
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1823,7 +1792,6 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return helper.list_to_camel_case(rows)
-@dev.timed
def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1866,7 +1834,6 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
return {"count": sum(i["count"] for i in rows), "chart": rows}
-@dev.timed
def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1892,7 +1859,6 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
return helper.list_to_camel_case(rows)
-@dev.timed
def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1916,7 +1882,6 @@ def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return helper.list_to_camel_case(rows)
-@dev.timed
def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
@@ -1940,7 +1905,6 @@ def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return helper.list_to_camel_case(rows)
-@dev.timed
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -1952,7 +1916,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
pg_sub_query_subset.append("resources.status > 200")
pg_sub_query_subset_e = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors",
- time_constraint=False)
+ time_constraint=False)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False,
chart=True, data=args, main_table="", time_column="timestamp",
project=False, duration=False)
@@ -2005,7 +1969,6 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
return rows
-@dev.timed
def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -2060,7 +2023,6 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
return helper.list_to_camel_case(__merge_charts(response_end, actions))
-@dev.timed
def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -2142,7 +2104,6 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
return {**row_sessions, **row_errors, "chart": chart}
-@dev.timed
def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -2193,7 +2154,6 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
return helper.list_to_camel_case(rows)
-@dev.timed
def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -2230,7 +2190,6 @@ def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_day
return rows
-@dev.timed
def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
@@ -2284,3 +2243,491 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
rows = cur.fetchall()
return rows
+
+
def __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average load duration of image resources over the given period.

    Runs on the caller's open cursor and returns the single aggregate row
    (dict with key ``value``).
    """
    conditions = __get_constraints(project_id=project_id, data=args)
    conditions += ["resources.duration > 0", "resources.type= %(type)s"]
    query = f"""\
    SELECT COALESCE(AVG(resources.duration),0) AS value
    FROM events.resources INNER JOIN public.sessions USING (session_id)
    WHERE {" AND ".join(conditions)};"""
    params = {"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, **__get_constraint_values(args)}
    cur.execute(cur.mogrify(query, params))
    return cur.fetchone()
+
+
def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                 endTimestamp=TimeUTC.now(), **args):
    """Average image load time for the period, with chart series, progress
    relative to the immediately preceding period of equal length, and unit."""
    with pg_client.PostgresClient() as cur:
        results = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
        results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        prev_row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp - period,
                                                                  startTimestamp, **args)
        previous = helper.dict_to_camel_case(prev_row)
        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(),
                                        density=19, **args):
    """Return a time series (one bucket per density step) of the average
    image-resource load duration between startTimestamp and endTimestamp."""
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
    # Optional image-specific filters; always empty here — presumably kept for
    # future/parameterized use. TODO confirm before removing.
    img_constraints = []

    img_constraints_vals = {}

    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp}
    with pg_client.PostgresClient() as cur:
        # Constraints for the time-bounded subset CTE and for the per-bucket lateral join.
        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
                                                chart=False, data=args)
        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
                                               chart=True, data=args, main_table="resources", time_column="timestamp",
                                               duration=False)
        pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
        pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")

        pg_query = f"""WITH resources AS (SELECT resources.duration, resources.timestamp
                            FROM events.resources INNER JOIN public.sessions USING (session_id)
                            WHERE {" AND ".join(pg_sub_query_subset)}
                                  AND resources.type = 'img' AND resources.duration>0
                                  {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
                        )
                        SELECT generated_timestamp AS timestamp,
                               COALESCE(AVG(resources.duration),0) AS value
                        FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                            LEFT JOIN LATERAL (
                                SELECT resources.duration
                                FROM resources
                                WHERE {" AND ".join(pg_sub_query_chart)}
                            ) AS resources ON (TRUE)
                        GROUP BY timestamp
                        ORDER BY timestamp;"""
        cur.execute(cur.mogrify(pg_query, {**params, **img_constraints_vals, **__get_constraint_values(args)}))
        rows = cur.fetchall()
        rows = helper.list_to_camel_case(rows)

    return rows
+
+
def __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average page load_time within [startTimestamp, endTimestamp).

    Bug fix: the upper bound previously used ``pages.timestamp > %(endTimestamp)s``,
    which together with ``timestamp >= startTimestamp`` made the range contradictory
    (it excluded the requested window entirely); it must be ``<``.
    """
    pg_sub_query = __get_constraints(project_id=project_id, data=args)
    pg_sub_query.append("pages.timestamp >= %(startTimestamp)s")
    pg_sub_query.append("pages.timestamp < %(endTimestamp)s")
    pg_sub_query.append("pages.load_time > 0")
    pg_sub_query.append("pages.load_time IS NOT NULL")
    pg_query = f"""\
    SELECT COALESCE(AVG(pages.load_time) ,0) AS value
    FROM events.pages INNER JOIN public.sessions USING (session_id)
    WHERE {" AND ".join(pg_sub_query)};"""
    params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
              **__get_constraint_values(args)}

    cur.execute(cur.mogrify(pg_query, params))
    row = cur.fetchone()
    return row
+
+
def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                endTimestamp=TimeUTC.now(), **args):
    """Average page load time for the period, with chart series, progress vs
    the immediately preceding period of equal length, and unit."""
    with pg_client.PostgresClient() as cur:
        results = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
        results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        prev_row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp - period,
                                                                 startTimestamp, **args)
        previous = helper.dict_to_camel_case(prev_row)
        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                       endTimestamp=TimeUTC.now(),
                                       density=19, **args):
    """Return a time series (one bucket per density step) of the average
    page load_time between startTimestamp and endTimestamp."""
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
    # Optional per-location filters; always empty here — presumably kept for
    # future/parameterized use. TODO confirm before removing.
    location_constraints = []
    location_constraints_vals = {}
    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp}
    with pg_client.PostgresClient() as cur:
        # Constraints for the time-bounded subset CTE and for the per-bucket lateral join.
        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
                                                chart=False, data=args)
        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
                                               chart=True, data=args, main_table="pages", time_column="timestamp",
                                               duration=False)
        pg_sub_query_subset.append("pages.timestamp >= %(startTimestamp)s")
        pg_sub_query_subset.append("pages.timestamp < %(endTimestamp)s")
        pg_query = f"""WITH pages AS(SELECT pages.load_time, timestamp
                       FROM events.pages INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
                             {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
                       )
                       SELECT generated_timestamp AS timestamp,
                              COALESCE(AVG(pages.load_time),0) AS value
                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                           LEFT JOIN LATERAL ( SELECT pages.load_time
                                               FROM pages
                                               WHERE {" AND ".join(pg_sub_query_chart)}
                                               {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
                           ) AS pages ON (TRUE)
                       GROUP BY generated_timestamp
                       ORDER BY generated_timestamp;"""
        cur.execute(cur.mogrify(pg_query, {**params, **location_constraints_vals, **__get_constraint_values(args)}))
        rows = cur.fetchall()
    return rows
+
+
def __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average duration of fetch (XHR/request) resources over the period.

    Bug fix: the query used to be executed twice — first with ``type='img'``
    (a copy/paste leftover whose result set was immediately discarded) and then
    with ``type='fetch'``. Only the 'fetch' execution is kept.
    """
    pg_sub_query = __get_constraints(project_id=project_id, data=args)
    pg_sub_query.append("resources.duration > 0")
    pg_sub_query.append("resources.type= %(type)s")
    pg_query = f"""\
    SELECT COALESCE(AVG(resources.duration),0) AS value
    FROM events.resources INNER JOIN public.sessions USING (session_id)
    WHERE {" AND ".join(pg_sub_query)};"""

    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp,
                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))

    row = cur.fetchone()
    return row
+
+
def get_application_activity_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                   endTimestamp=TimeUTC.now(), **args):
    """Average request (fetch) load time for the period, with chart series,
    progress vs the immediately preceding period of equal length, and unit."""
    with pg_client.PostgresClient() as cur:
        results = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp,
                                                                   **args)
        results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        prev_row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp - period,
                                                                    startTimestamp, **args)
        previous = helper.dict_to_camel_case(prev_row)
        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                          endTimestamp=TimeUTC.now(),
                                          density=19, **args):
    """Return a time series (one bucket per density step) of the average
    fetch-resource (XHR/request) duration between startTimestamp and endTimestamp.

    Cleanup: removed the ``location_*`` and ``img_*`` constraint locals that were
    declared but never referenced in this function — only the ``request_*`` pair
    participates in the query.
    """
    step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density, factor=1)
    # Optional request-url filters; always empty here, kept for future use.
    request_constraints = []
    request_constraints_vals = {}

    params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp}
    with pg_client.PostgresClient() as cur:
        # Constraints for the time-bounded subset CTE and for the per-bucket lateral join.
        pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
                                                chart=False, data=args)
        pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
                                               chart=True, data=args, main_table="resources", time_column="timestamp",
                                               duration=False)
        pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
        pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")

        pg_query = f"""WITH resources AS(SELECT resources.duration, resources.timestamp
                       FROM events.resources INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(pg_sub_query_subset)}
                             AND resources.type = 'fetch' AND resources.duration>0
                             {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
                       )
                       SELECT generated_timestamp AS timestamp,
                              COALESCE(AVG(resources.duration),0) AS value
                       FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                           LEFT JOIN LATERAL (
                               SELECT resources.duration
                               FROM resources
                               WHERE {" AND ".join(pg_sub_query_chart)}
                           ) AS resources ON (TRUE)
                       GROUP BY generated_timestamp
                       ORDER BY generated_timestamp;"""
        cur.execute(cur.mogrify(pg_query, {**params, **request_constraints_vals, **__get_constraint_values(args)}))
        rows = cur.fetchall()

    return rows
+
+
def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                endTimestamp=TimeUTC.now(), **args):
    """Average DOM-content-loaded time with previous-period progress and unit.

    NOTE(review): the aggregate query always yields one row, so the emptiness
    guards look defensive; ``results`` would be unbound if they ever fired — confirm.
    """
    with pg_client.PostgresClient() as cur:
        current_rows = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp,
                                                                     **args)
        if current_rows:
            results = helper.dict_to_camel_case(current_rows[0])
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        previous_rows = __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp - period,
                                                                      startTimestamp, **args)
        if previous_rows:
            previous = helper.dict_to_camel_case(previous_rows[0])
            results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def __get_page_metrics_avg_dom_content_load_start(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average non-zero dom_content_loaded_time for the period."""
    conditions = __get_constraints(project_id=project_id, data=args)
    conditions += ["pages.timestamp>=%(startTimestamp)s",
                   "pages.timestamp<%(endTimestamp)s",
                   "pages.dom_content_loaded_time > 0"]
    pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_time, 0)), 0) AS value
                   FROM (SELECT pages.dom_content_loaded_time
                         FROM events.pages
                                  INNER JOIN public.sessions USING (session_id)
                         WHERE {" AND ".join(conditions)}
                        ) AS pages;"""
    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp,
                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
    return cur.fetchall()
+
+
def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                endTimestamp=TimeUTC.now(), **args):
    """Average first-contentful-paint time with previous-period progress and unit.

    NOTE(review): the aggregate query always yields one row, so the emptiness
    guards look defensive; ``results`` would be unbound if they ever fired — confirm.
    """
    with pg_client.PostgresClient() as cur:
        current_rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp,
                                                                     **args)
        if current_rows:
            results = helper.dict_to_camel_case(current_rows[0])
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        previous_rows = __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp - period,
                                                                     startTimestamp, **args)
        if previous_rows:
            previous = helper.dict_to_camel_case(previous_rows[0])
            results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def __get_page_metrics_avg_first_contentful_pixel(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average non-zero first_contentful_paint_time for the period."""
    conditions = __get_constraints(project_id=project_id, data=args)
    conditions += ["pages.timestamp>=%(startTimestamp)s",
                   "pages.timestamp<%(endTimestamp)s",
                   "pages.first_contentful_paint_time > 0"]
    pg_query = f"""SELECT COALESCE(AVG(NULLIF(pages.first_contentful_paint_time, 0)), 0) AS value
                   FROM (SELECT pages.first_contentful_paint_time
                         FROM events.pages
                                  INNER JOIN public.sessions USING (session_id)
                         WHERE {" AND ".join(conditions)}
                        ) AS pages;"""
    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp,
                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
    return cur.fetchall()
+
+
def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(), **args):
    """Average number of pages visited per session, with progress relative to
    the immediately preceding period of equal length."""
    with pg_client.PostgresClient() as cur:
        results = helper.dict_to_camel_case(
            __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args))
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        previous = helper.dict_to_camel_case(
            __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp - period, startTimestamp, **args))
        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.count
    return results
+
+
def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the ceiled average of non-zero pages_count over sessions in the period."""
    conditions = __get_constraints(project_id=project_id, data=args)
    pg_query = f"""\
    SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS value
    FROM public.sessions
    WHERE {" AND ".join(conditions)};"""
    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp,
                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
    return cur.fetchone()
+
+
def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                           endTimestamp=TimeUTC.now(), **args):
    """Average session duration for the period, with progress relative to the
    immediately preceding period of equal length."""
    with pg_client.PostgresClient() as cur:
        results = helper.dict_to_camel_case(
            __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args))
        # shift the window one full period back to compute the progress
        period = endTimestamp - startTimestamp
        previous = helper.dict_to_camel_case(
            __get_user_activity_avg_session_duration(cur, project_id, startTimestamp - period, startTimestamp,
                                                     **args))
        results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
    results["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return results
+
+
def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args):
    """Fetch the average non-zero session duration over the period."""
    conditions = __get_constraints(project_id=project_id, data=args)
    pg_query = f"""\
    SELECT COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS value
    FROM public.sessions
    WHERE {" AND ".join(conditions)};"""
    cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "startTimestamp": startTimestamp,
                                       "endTimestamp": endTimestamp, **__get_constraint_values(args)}))
    return cur.fetchone()
+
+
def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                      endTimestamp=TimeUTC.now(), value=None, **args):
    """Average page response time over the period, optionally restricted to a
    single page path (``value``)."""
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COALESCE(AVG(pages.response_time), 0) AS value
                       FROM events.pages
                                INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)}
                         AND pages.timestamp >= %(startTimestamp)s
                         AND pages.timestamp < %(endTimestamp)s
                         AND pages.response_time > 0;"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return helper.dict_to_camel_case(result)
+
+
def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                    endTimestamp=TimeUTC.now(), value=None, **args):
    """Average first-paint time over the period, optionally restricted to a
    single page path (``value``)."""
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COALESCE(AVG(pages.first_paint_time), 0) AS value
                       FROM events.pages
                                INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)}
                         AND pages.timestamp >= %(startTimestamp)s
                         AND pages.timestamp < %(endTimestamp)s
                         AND pages.first_paint_time > 0;"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return helper.dict_to_camel_case(result)
+
+
def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                           endTimestamp=TimeUTC.now(), value=None, **args):
    """Average DOM-content-loaded time over the period, optionally restricted
    to a single page path (``value``)."""
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_time), 0) AS value
                       FROM events.pages
                                INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)}
                         AND pages.timestamp >= %(startTimestamp)s
                         AND pages.timestamp < %(endTimestamp)s
                         AND pages.dom_content_loaded_time > 0;"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return helper.dict_to_camel_case(result)
+
+
def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                       endTimestamp=TimeUTC.now(), value=None, **args):
    """Average time-to-first-byte (ttfb) over the period, optionally restricted
    to a single page path (``value``)."""
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COALESCE(AVG(pages.ttfb), 0) AS value
                       FROM events.pages
                                INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)}
                         AND pages.timestamp >= %(startTimestamp)s
                         AND pages.timestamp < %(endTimestamp)s
                         AND pages.ttfb > 0;"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return helper.dict_to_camel_case(result)
+
+
def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                            endTimestamp=TimeUTC.now(), value=None, **args):
    """Average time-to-interactive over the period, optionally restricted to a
    single page path (``value``)."""
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive), 0) AS value
                       FROM events.pages
                                INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)}
                         AND pages.timestamp >= %(startTimestamp)s
                         AND pages.timestamp < %(endTimestamp)s
                         AND pages.time_to_interactive > 0;"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.millisecond
    return helper.dict_to_camel_case(result)
+
+
def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                   endTimestamp=TimeUTC.now(), value=None, **args):
    """Count page events, optionally restricted to a single page path (``value``).

    NOTE(review): unlike the sibling get_top_metrics_* helpers, this query adds
    no explicit pages.timestamp bounds; confirm whether __get_constraints
    already applies the session-level time window here.
    """
    conditions = __get_constraints(project_id=project_id, data=args)
    if value is not None:
        conditions.append("pages.path = %(value)s")
    params = {"project_id": project_id, "startTimestamp": startTimestamp,
              "endTimestamp": endTimestamp, "value": value, **__get_constraint_values(args)}
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT COUNT(pages.session_id) AS value
                       FROM events.pages INNER JOIN public.sessions USING (session_id)
                       WHERE {" AND ".join(conditions)};"""
        cur.execute(cur.mogrify(pg_query, params))
        result = cur.fetchone()
    result["unit"] = schemas.TemplatePredefinedUnits.count
    return helper.dict_to_camel_case(result)
diff --git a/api/chalicelib/core/dashboards2.py b/api/chalicelib/core/dashboards2.py
new file mode 100644
index 000000000..a66324532
--- /dev/null
+++ b/api/chalicelib/core/dashboards2.py
@@ -0,0 +1,309 @@
+import json
+
+import schemas
+from chalicelib.core import custom_metrics, dashboard
+from chalicelib.utils import helper
+from chalicelib.utils import pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
+CATEGORY_DESCRIPTION = {
+ 'overview': 'lorem ipsum',
+ 'custom': 'lorem cusipsum',
+ 'errors': 'lorem erripsum',
+ 'performance': 'lorem perfipsum',
+ 'resources': 'lorem resipsum'
+}
+
+
+def get_templates(project_id, user_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
+ FROM (SELECT * , default_config AS config
+ FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ WHERE deleted_at IS NULL
+ AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
+ ) AS metrics
+ GROUP BY category
+ ORDER BY category;""", {"project_id": project_id, "userId": user_id})
+ cur.execute(pg_query)
+ rows = cur.fetchall()
+ for r in rows:
+ r["description"] = CATEGORY_DESCRIPTION.get(r["category"], "")
+ for w in r["widgets"]:
+ w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
+ w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+ return helper.list_to_camel_case(rows)
+
+
+def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
+ with pg_client.PostgresClient() as cur:
+ pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned)
+ VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s)
+ RETURNING *"""
+ params = {"userId": user_id, "projectId": project_id, **data.dict()}
+ if data.metrics is not None and len(data.metrics) > 0:
+ pg_query = f"""WITH dash AS ({pg_query})
+ INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+ VALUES {",".join([f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])}
+ RETURNING (SELECT dashboard_id FROM dash)"""
+ for i, m in enumerate(data.metrics):
+ params[f"metric_id_{i}"] = m
+ # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
+ # .get("properties", {}).get("config", {}).get("default", {})
+ # params[f"config_{i}"]["position"] = i
+ # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
+ params[f"config_{i}"] = json.dumps({"position": i})
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ if row is None:
+ return {"errors": ["something went wrong while creating the dashboard"]}
+ return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
+
+
+def get_dashboards(project_id, user_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = f"""SELECT *
+ FROM dashboards
+ WHERE deleted_at ISNULL
+ AND project_id = %(projectId)s
+ AND (user_id = %(userId)s OR is_public);"""
+ params = {"userId": user_id, "projectId": project_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ rows = cur.fetchall()
+ return helper.list_to_camel_case(rows)
+
+
+def get_dashboard(project_id, user_id, dashboard_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
+ FROM dashboards
+ LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
+ FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
+ FROM metrics
+ INNER JOIN dashboard_widgets USING (metric_id)
+ LEFT JOIN LATERAL (SELECT JSONB_AGG(metric_series.* ORDER BY index) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
+ AND metrics.deleted_at ISNULL
+ AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
+ ) AS all_metric_widgets ON (TRUE)
+ WHERE dashboards.deleted_at ISNULL
+ AND dashboards.project_id = %(projectId)s
+ AND dashboard_id = %(dashboard_id)s
+ AND (dashboards.user_id = %(userId)s OR is_public);"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ if row is not None:
+ for w in row["widgets"]:
+ row["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
+ row["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+ return helper.dict_to_camel_case(row)
+
+
+def delete_dashboard(project_id, user_id, dashboard_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """UPDATE dashboards
+ SET deleted_at = timezone('utc'::text, now())
+ WHERE dashboards.project_id = %(projectId)s
+ AND dashboard_id = %(dashboard_id)s
+ AND (dashboards.user_id = %(userId)s OR is_public);"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ return {"data": {"success": True}}
+
+
+def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
+ with pg_client.PostgresClient() as cur:
+ pg_query = f"""UPDATE dashboards
+ SET name = %(name)s
+ {", is_public = %(is_public)s" if data.is_public is not None else ""}
+ {", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
+ WHERE dashboards.project_id = %(projectId)s
+ AND dashboard_id = %(dashboard_id)s
+ AND (dashboards.user_id = %(userId)s OR is_public)"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+ if data.metrics is not None and len(data.metrics) > 0:
+ pg_query = f"""WITH dash AS ({pg_query})
+ INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+ VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};"""
+ for i, m in enumerate(data.metrics):
+ params[f"metric_id_{i}"] = m
+ # params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
+ # .get("properties", {}).get("config", {}).get("default", {})
+ # params[f"config_{i}"]["position"] = i
+ # params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
+ params[f"config_{i}"] = json.dumps({"position": i})
+
+ cur.execute(cur.mogrify(pg_query, params))
+
+ return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
+
+
+def get_widget(project_id, user_id, dashboard_id, widget_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """SELECT metrics.*, metric_series.series
+ FROM dashboard_widgets
+ INNER JOIN dashboards USING (dashboard_id)
+ INNER JOIN metrics USING (metric_id)
+ LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
+ FROM metric_series
+ WHERE metric_series.metric_id = metrics.metric_id
+ AND metric_series.deleted_at ISNULL
+ ) AS metric_series ON (TRUE)
+ WHERE dashboard_id = %(dashboard_id)s
+ AND widget_id = %(widget_id)s
+ AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
+ AND dashboards.deleted_at IS NULL
+ AND metrics.deleted_at ISNULL
+ AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
+ AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
+def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
+ SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
+ %(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
+ WHERE EXISTS(SELECT 1 FROM dashboards
+ WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
+ AND dashboard_id = %(dashboard_id)s
+ AND (dashboards.user_id = %(userId)s OR is_public))
+ RETURNING *;"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+ params["config"] = json.dumps(data.config)
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
+def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """UPDATE dashboard_widgets
+ SET config= %(config)s
+ WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
+ RETURNING *;"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
+ "widget_id": widget_id, **data.dict()}
+ params["config"] = json.dumps(data.config)
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
+def remove_widget(project_id, user_id, dashboard_id, widget_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """DELETE FROM dashboard_widgets
+ WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
+ params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ return {"data": {"success": True}}
+
+
+def pin_dashboard(project_id, user_id, dashboard_id):
+ with pg_client.PostgresClient() as cur:
+ pg_query = """UPDATE dashboards
+ SET is_pinned = FALSE
+ WHERE project_id=%(project_id)s;
+ UPDATE dashboards
+ SET is_pinned = True
+ WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
+ RETURNING *;"""
+ params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
+ cur.execute(cur.mogrify(pg_query, params))
+ row = cur.fetchone()
+ return helper.dict_to_camel_case(row)
+
+
+def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
+ metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
+ return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
+ data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
+
+
+PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: dashboard.get_processed_sessions,
+ schemas.TemplatePredefinedKeys.avg_image_load_time: dashboard.get_application_activity_avg_image_load_time,
+ schemas.TemplatePredefinedKeys.avg_page_load_time: dashboard.get_application_activity_avg_page_load_time,
+ schemas.TemplatePredefinedKeys.avg_request_load_time: dashboard.get_application_activity_avg_request_load_time,
+ schemas.TemplatePredefinedKeys.avg_dom_content_load_start: dashboard.get_page_metrics_avg_dom_content_load_start,
+ schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: dashboard.get_page_metrics_avg_first_contentful_pixel,
+ schemas.TemplatePredefinedKeys.avg_visited_pages: dashboard.get_user_activity_avg_visited_pages,
+ schemas.TemplatePredefinedKeys.avg_session_duration: dashboard.get_user_activity_avg_session_duration,
+ schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: dashboard.get_pages_dom_build_time,
+ schemas.TemplatePredefinedKeys.avg_pages_response_time: dashboard.get_pages_response_time,
+ schemas.TemplatePredefinedKeys.avg_response_time: dashboard.get_top_metrics_avg_response_time,
+ schemas.TemplatePredefinedKeys.avg_first_paint: dashboard.get_top_metrics_avg_first_paint,
+ schemas.TemplatePredefinedKeys.avg_dom_content_loaded: dashboard.get_top_metrics_avg_dom_content_loaded,
+ schemas.TemplatePredefinedKeys.avg_till_first_bit: dashboard.get_top_metrics_avg_till_first_bit,
+ schemas.TemplatePredefinedKeys.avg_time_to_interactive: dashboard.get_top_metrics_avg_time_to_interactive,
+ schemas.TemplatePredefinedKeys.count_requests: dashboard.get_top_metrics_count_requests,
+ schemas.TemplatePredefinedKeys.avg_time_to_render: dashboard.get_time_to_render,
+ schemas.TemplatePredefinedKeys.avg_used_js_heap_size: dashboard.get_memory_consumption,
+ schemas.TemplatePredefinedKeys.avg_cpu: dashboard.get_avg_cpu,
+ schemas.TemplatePredefinedKeys.avg_fps: dashboard.get_avg_fps,
+ schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: dashboard.get_impacted_sessions_by_js_errors,
+ schemas.TemplatePredefinedKeys.domains_errors_4xx: dashboard.get_domains_errors_4xx,
+ schemas.TemplatePredefinedKeys.domains_errors_5xx: dashboard.get_domains_errors_5xx,
+ schemas.TemplatePredefinedKeys.errors_per_domains: dashboard.get_errors_per_domains,
+ schemas.TemplatePredefinedKeys.calls_errors: dashboard.get_calls_errors,
+ schemas.TemplatePredefinedKeys.errors_by_type: dashboard.get_errors_per_type,
+ schemas.TemplatePredefinedKeys.errors_by_origin: dashboard.get_resources_by_party,
+ schemas.TemplatePredefinedKeys.speed_index_by_location: dashboard.get_speed_index_location,
+ schemas.TemplatePredefinedKeys.slowest_domains: dashboard.get_slowest_domains,
+ schemas.TemplatePredefinedKeys.sessions_per_browser: dashboard.get_sessions_per_browser,
+ schemas.TemplatePredefinedKeys.time_to_render: dashboard.get_time_to_render,
+ schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: dashboard.get_impacted_sessions_by_slow_pages,
+ schemas.TemplatePredefinedKeys.memory_consumption: dashboard.get_memory_consumption,
+ schemas.TemplatePredefinedKeys.cpu_load: dashboard.get_avg_cpu,
+ schemas.TemplatePredefinedKeys.frame_rate: dashboard.get_avg_fps,
+ schemas.TemplatePredefinedKeys.crashes: dashboard.get_crashes,
+ schemas.TemplatePredefinedKeys.resources_vs_visually_complete: dashboard.get_resources_vs_visually_complete,
+ schemas.TemplatePredefinedKeys.pages_dom_buildtime: dashboard.get_pages_dom_build_time,
+ schemas.TemplatePredefinedKeys.pages_response_time: dashboard.get_pages_response_time,
+ schemas.TemplatePredefinedKeys.pages_response_time_distribution: dashboard.get_pages_response_time_distribution,
+ schemas.TemplatePredefinedKeys.missing_resources: dashboard.get_missing_resources_trend,
+ schemas.TemplatePredefinedKeys.slowest_resources: dashboard.get_slowest_resources,
+ schemas.TemplatePredefinedKeys.resources_fetch_time: dashboard.get_resources_loading_time,
+ schemas.TemplatePredefinedKeys.resource_type_vs_response_end: dashboard.resource_type_vs_response_end,
+ schemas.TemplatePredefinedKeys.resources_count_by_type: dashboard.get_resources_count_by_type,
+ }
+
+
+def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
+    return PREDEFINED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)
+
+
+def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
+ raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
+ include_dashboard=False)
+ if raw_metric is None:
+ return None
+    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
+ if metric.is_template:
+ return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
+ else:
+ return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
+ metric=raw_metric)
+
+
+def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
+ raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
+ if raw_metric is None:
+ return None
+    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
+ if metric.is_template:
+ return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
+ else:
+ return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
+ data=data, metric=raw_metric)
diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py
index 08adfd3ca..5b3894606 100644
--- a/api/chalicelib/core/insights.py
+++ b/api/chalicelib/core/insights.py
@@ -1,11 +1,8 @@
import schemas
-from chalicelib.core import sessions_metas
+from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
from chalicelib.utils import helper, dev
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils.metrics_helper import __get_step_size
-import math
-from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
def __transform_journey(rows):
@@ -930,4 +927,4 @@ def search(text, feature_type, project_id, platform=None):
rows = cur.fetchall()
else:
return []
- return [helper.dict_to_camel_case(row) for row in rows]
\ No newline at end of file
+ return [helper.dict_to_camel_case(row) for row in rows]
diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py
index ea9c6c24e..7d8c956cf 100644
--- a/api/chalicelib/core/integration_jira_cloud.py
+++ b/api/chalicelib/core/integration_jira_cloud.py
@@ -15,10 +15,17 @@ class JIRAIntegration(integration_base.BaseIntegration):
# TODO: enable super-constructor when OAuth is done
# super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy)
self._user_id = user_id
- i = self.get()
- if i is None:
+ self.integration = self.get()
+ if self.integration is None:
return
- self.issue_handler = JIRACloudIntegrationIssue(token=i["token"], username=i["username"], url=i["url"])
+ self.integration["valid"] = True
+ try:
+ self.issue_handler = JIRACloudIntegrationIssue(token=self.integration["token"],
+ username=self.integration["username"],
+ url=self.integration["url"])
+ except Exception as e:
+ self.issue_handler = None
+ self.integration["valid"] = False
@property
def provider(self):
@@ -37,10 +44,10 @@ class JIRAIntegration(integration_base.BaseIntegration):
return helper.dict_to_camel_case(cur.fetchone())
def get_obfuscated(self):
- integration = self.get()
- if integration is None:
+ if self.integration is None:
return None
- integration["token"] = obfuscate_string(integration["token"])
+ integration = dict(self.integration)
+ integration["token"] = obfuscate_string(self.integration["token"])
integration["provider"] = self.provider.lower()
return integration
@@ -90,14 +97,13 @@ class JIRAIntegration(integration_base.BaseIntegration):
return {"state": "success"}
def add_edit(self, data):
- s = self.get()
- if s is not None:
+ if self.integration is not None:
return self.update(
changes={
"username": data["username"],
"token": data["token"] \
if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \
- else s["token"],
+ else self.integration["token"],
"url": data["url"]
},
obfuscate=True
diff --git a/api/chalicelib/core/integrations_manager.py b/api/chalicelib/core/integrations_manager.py
index fca271870..ef63a7d96 100644
--- a/api/chalicelib/core/integrations_manager.py
+++ b/api/chalicelib/core/integrations_manager.py
@@ -36,7 +36,10 @@ def get_integration(tenant_id, user_id, tool=None):
if tool not in SUPPORTED_TOOLS:
return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED_TOOLS}"]}, None
if tool == integration_jira_cloud.PROVIDER:
- return None, integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id)
+ integration = integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id)
+ if integration.integration is not None and not integration.integration.get("valid", True):
+ return {"errors": ["JIRA: connexion issue/unauthorized"]}, integration
+ return None, integration
elif tool == integration_github.PROVIDER:
return None, integration_github.GitHubIntegration(tenant_id=tenant_id, user_id=user_id)
return {"errors": ["lost integration"]}, None
diff --git a/api/chalicelib/core/projects.py b/api/chalicelib/core/projects.py
index c57360dc4..3559f645a 100644
--- a/api/chalicelib/core/projects.py
+++ b/api/chalicelib/core/projects.py
@@ -57,7 +57,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
cur.execute(f"""\
SELECT
- s.project_id, s.name, s.project_key
+ s.project_id, s.name, s.project_key, s.save_request_payloads
{',s.gdpr' if gdpr else ''}
{',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
{',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
@@ -65,27 +65,26 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
FROM public.projects AS s
{'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
WHERE s.deleted_at IS NULL
- ORDER BY s.project_id;"""
- )
+ ORDER BY s.project_id;""")
rows = cur.fetchall()
if recording_state:
project_ids = [f'({r["project_id"]})' for r in rows]
- query = f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
- FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
- LEFT JOIN sessions USING (project_id)
- GROUP BY project_id;"""
- cur.execute(
- query=query
- )
+ query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
+ FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
+ LEFT JOIN sessions USING (project_id)
+ WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
+ GROUP BY project_id;""",
+ {"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
+
+ cur.execute(query=query)
status = cur.fetchall()
for r in rows:
+ r["status"] = "red"
for s in status:
if s["project_id"] == r["project_id"]:
- if s["last"] < TimeUTC.now(-2):
- r["status"] = "red"
- elif s["last"] < TimeUTC.now(-1):
+ if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1):
r["status"] = "yellow"
- else:
+ elif s["last"] >= TimeUTC.now(-1):
r["status"] = "green"
break
@@ -109,7 +108,8 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
SELECT
s.project_id,
s.project_key,
- s.name
+ s.name,
+ s.save_request_payloads
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
{tracker_query}
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 8f619f66b..83cad2ad7 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -39,7 +39,8 @@ def __group_metadata(session, project_metadata):
return meta
-def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False):
+def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False,
+ live=True):
with pg_client.PostgresClient() as cur:
extra_query = []
if include_fav_viewed:
@@ -97,9 +98,9 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['issues'] = issues.get_by_session_id(session_id=session_id)
- data['live'] = assist.is_live(project_id=project_id,
- session_id=session_id,
- project_key=data["projectKey"])
+ data['live'] = live and assist.is_live(project_id=project_id,
+ session_id=session_id,
+ project_key=data["projectKey"])
data["inDB"] = True
return data
else:
diff --git a/api/chalicelib/utils/TimeUTC.py b/api/chalicelib/utils/TimeUTC.py
index bac7a027f..d399e1651 100644
--- a/api/chalicelib/utils/TimeUTC.py
+++ b/api/chalicelib/utils/TimeUTC.py
@@ -88,13 +88,18 @@ class TimeUTC:
return datetime.utcfromtimestamp(ts // 1000).strftime(fmt)
@staticmethod
- def human_to_timestamp(ts, pattern):
+ def human_to_timestamp(ts, pattern="%Y-%m-%dT%H:%M:%S.%f"):
return int(datetime.strptime(ts, pattern).timestamp() * 1000)
@staticmethod
def datetime_to_timestamp(date):
if date is None:
return None
+ if isinstance(date, str):
+ fp = date.find(".")
+ if fp > 0:
+ date += '0' * (6 - len(date[fp + 1:]))
+ date = datetime.fromisoformat(date)
return int(datetime.timestamp(date) * 1000)
@staticmethod
diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py
index d3b637373..b1734660c 100644
--- a/api/chalicelib/utils/jira_client.py
+++ b/api/chalicelib/utils/jira_client.py
@@ -5,22 +5,24 @@ import requests
from jira import JIRA
from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
+from starlette import status
+from starlette.exceptions import HTTPException
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
class JiraManager:
- # retries = 5
retries = 0
def __init__(self, url, username, password, project_id=None):
self._config = {"JIRA_PROJECT_ID": project_id, "JIRA_URL": url, "JIRA_USERNAME": username,
"JIRA_PASSWORD": password}
try:
- self._jira = JIRA({'server': url}, basic_auth=(username, password), logging=True, max_retries=1)
+ self._jira = JIRA(url, basic_auth=(username, password), logging=True, max_retries=1)
except Exception as e:
print("!!! JIRA AUTH ERROR")
print(e)
+ raise e
def set_jira_project_id(self, project_id):
self._config["JIRA_PROJECT_ID"] = project_id
@@ -33,8 +35,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_projects()
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
projects_dict_list = []
for project in projects:
projects_dict_list.append(self.__parser_project_info(project))
@@ -49,8 +51,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_project()
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_project_info(project)
def get_issues(self, sql: str, offset: int = 0):
@@ -65,8 +67,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_issues(sql, offset)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
issue_dict_list = []
for issue in issues:
@@ -85,8 +87,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_issue(issue_id)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_issue_info(issue)
def get_issue_v3(self, issue_id: str):
@@ -105,8 +107,8 @@ class JiraManager:
if self.retries > 0:
time.sleep(1)
return self.get_issue_v3(issue_id)
- print(f"=>Error {e}")
- raise e
+ print(f"=>Exception {e}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: get issue error")
return self.__parser_issue_info(issue.json())
def create_issue(self, issue_dict):
@@ -119,8 +121,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.create_issue(issue_dict)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
def close_issue(self, issue):
try:
@@ -131,8 +133,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.close_issue(issue)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
def assign_issue(self, issue_id, account_id) -> bool:
try:
@@ -142,8 +144,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.assign_issue(issue_id, account_id)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
def add_comment(self, issue_id: str, comment: str):
try:
@@ -153,8 +155,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.add_comment(issue_id, comment)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
return self.__parser_comment_info(comment)
def add_comment_v3(self, issue_id: str, comment: str):
@@ -190,8 +192,8 @@ class JiraManager:
if self.retries > 0:
time.sleep(1)
return self.add_comment_v3(issue_id, comment)
- print(f"=>Error {e}")
- raise e
+ print(f"=>Exception {e}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: comment error")
return self.__parser_comment_info(comment_response.json())
def get_comments(self, issueKey):
@@ -206,8 +208,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_comments(issueKey)
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
def get_meta(self):
meta = {}
@@ -217,14 +219,16 @@ class JiraManager:
def get_assignable_users(self):
try:
- users = self._jira.search_assignable_users_for_issues('', project=self._config['JIRA_PROJECT_ID'])
+ users = self._jira.search_assignable_users_for_issues(project=self._config['JIRA_PROJECT_ID'], query="*")
except JIRAError as e:
self.retries -= 1
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_assignable_users()
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ if e.status_code == 401:
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="JIRA: 401 Unauthorized")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
users_dict = []
for user in users:
users_dict.append({
@@ -244,8 +248,8 @@ class JiraManager:
if (e.status_code // 100) == 4 and self.retries > 0:
time.sleep(1)
return self.get_issue_types()
- print(f"=>Error {e.text}")
- raise e
+ print(f"=>Exception {e.text}")
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
types_dict = []
for type in types:
if not type.subtask and not type.name.lower() == "epic":
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index 6e4118689..3d60dda5c 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -1,3 +1,4 @@
+import time
from threading import Semaphore
import psycopg2
@@ -9,7 +10,8 @@ _PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
- "port": config("pg_port", cast=int)}
+ "port": config("pg_port", cast=int),
+ "application_name": config("APP_NAME", default="PY")}
PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
@@ -36,9 +38,14 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
postgreSQL_pool: ORThreadedConnectionPool = None
+RETRY_MAX = config("PG_RETRY_MAX", cast=int, default=50)
+RETRY_INTERVAL = config("PG_RETRY_INTERVAL", cast=int, default=2)
+RETRY = 0
+
def make_pool():
global postgreSQL_pool
+ global RETRY
if postgreSQL_pool is not None:
try:
postgreSQL_pool.closeall()
@@ -50,7 +57,13 @@ def make_pool():
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
print("Error while connecting to PostgreSQL", error)
- raise error
+ if RETRY < RETRY_MAX:
+ RETRY += 1
+ print(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+ time.sleep(RETRY_INTERVAL)
+ make_pool()
+ else:
+ raise error
make_pool()
@@ -64,6 +77,8 @@ class PostgresClient:
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
+ long_config = dict(_PG_CONFIG)
+ long_config["application_name"] += "-LONG"
- self.connection = psycopg2.connect(**_PG_CONFIG)
+ self.connection = psycopg2.connect(**long_config)
else:
self.connection = postgreSQL_pool.getconn()
diff --git a/api/requirements.txt b/api/requirements.txt
index 4af962f4f..198b535dd 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -4,11 +4,11 @@ boto3==1.16.1
pyjwt==1.7.1
psycopg2-binary==2.8.6
elasticsearch==7.9.1
-jira==2.0.0
+jira==3.1.1
-fastapi==0.74.1
+fastapi==0.75.0
uvicorn[standard]==0.17.5
python-decouple==3.6
pydantic[email]==1.8.2
diff --git a/api/routers/core.py b/api/routers/core.py
index 97a749429..06743c054 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -21,6 +21,7 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
+@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
@@ -36,6 +37,7 @@ def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.Cu
}
+@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"])
def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
@@ -44,6 +46,7 @@ def add_remove_favorite_session2(projectId: int, sessionId: int,
session_id=sessionId)}
+@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
@@ -56,6 +59,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
}
+@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
@app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
@@ -67,6 +71,7 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
}
+@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"])
@app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"])
def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
@@ -79,6 +84,8 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
}
+@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
+@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
@@ -387,7 +394,7 @@ def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(O
def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
- if error is not None:
+ if error is not None and integration is None:
return {"data": {}}
return {"data": integration.get_obfuscated()}
@@ -399,7 +406,7 @@ def add_edit_jira_cloud(data: schemas.JiraGithubSchema = Body(...),
error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
tenant_id=context.tenant_id,
user_id=context.user_id)
- if error is not None:
+ if error is not None and integration is None:
return error
data.provider = integration_jira_cloud.PROVIDER
return {"data": integration.add_edit(data=data.dict())}
@@ -422,7 +429,7 @@ def add_edit_github(data: schemas.JiraGithubSchema = Body(...),
def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
- if error is not None:
+ if error is not None and integration is None:
return error
return {"data": integration.delete()}
@@ -825,6 +832,19 @@ def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentCo
return {'data': data}
+@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"])
+def get_live_session(projectId: int, sessionId: str, context: schemas.CurrentContext = Depends(OR_context)):
+ data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
+ if data is None:
+ data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
+ user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False)
+ if data is None:
+ return {"errors": ["session not found"]}
+ if data.get("inDB"):
+ sessions_favorite_viewed.view_session(project_id=projectId, user_id=context.user_id, session_id=sessionId)
+ return {'data': data}
+
+
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@@ -1065,78 +1085,6 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
user_id=context.user_id)
-@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
-def try_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": custom_metrics.merged_live(project_id=projectId, data=data)}
-
-
-@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
-def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
-
-
-@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
-def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
-
-
-@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
- data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
- if data is None:
- return {"errors": ["custom metric not found"]}
- return {"data": data}
-
-
-@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
-def get_custom_metric_sessions(projectId: int, metric_id: int,
- data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
- if data is None:
- return {"errors": ["custom metric not found"]}
- return {"data": data}
-
-
-@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
-def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- data = custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
- data=data)
- if data is None:
- return {"errors": ["custom metric not found"]}
- return {"data": data}
-
-
-@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
- if data is None:
- return {"errors": ["custom metric not found"]}
- return {"data": data}
-
-
-@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
-def update_custom_metric_state(projectId: int, metric_id: int,
- data: schemas.UpdateCustomMetricsStatusSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- return {
- "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
- status=data.active)}
-
-
-@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
-
-
@app.post('/{projectId}/saved_search', tags=["savedSearch"])
@app.put('/{projectId}/saved_search', tags=["savedSearch"])
def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...),
diff --git a/api/routers/subs/dashboard.py b/api/routers/subs/dashboard.py
index 169893693..e2d4ba268 100644
--- a/api/routers/subs/dashboard.py
+++ b/api/routers/subs/dashboard.py
@@ -325,22 +325,73 @@ def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPa
@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
- return {"data": [
- *helper.explode_widget(key="count_sessions",
- data=dashboard.get_processed_sessions(project_id=projectId, **data.dict())),
+ results = [
+ {"key": "count_sessions",
+ "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
*helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()),
"chart": dashboard.get_performance(project_id=projectId, **data.dict())
.get("chart", [])}),
*helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())),
- *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()),
- key="avg_pages_dom_buildtime"),
- *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
- key="avg_pages_response_time"),
+ {"key": "avg_pages_dom_buildtime",
+ "data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())},
+ {"key": "avg_pages_response_time",
+ "data": dashboard.get_pages_response_time(project_id=projectId, **data.dict())
+ },
*helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())),
- *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()),
- key="avg_time_to_render"),
- *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **data.dict())),
- *helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **data.dict())),
- *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **data.dict())),
- ]}
+ {"key": "avg_time_to_render", "data": dashboard.get_time_to_render(project_id=projectId, **data.dict())},
+ {"key": "avg_used_js_heap_size", "data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())},
+ {"key": "avg_cpu", "data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_fps, "data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
+ ]
+ results = sorted(results, key=lambda r: r["key"])
+ return {"data": results}
+
+
+@app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
+@app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
+def get_dashboard_group2(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
+ results = [
+ {"key": schemas.TemplatePredefinedKeys.count_sessions,
+ "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_image_load_time,
+ "data": dashboard.get_application_activity_avg_image_load_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_page_load_time,
+ "data": dashboard.get_application_activity_avg_page_load_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_request_load_time,
+ "data": dashboard.get_application_activity_avg_request_load_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_dom_content_load_start,
+ "data": dashboard.get_page_metrics_avg_dom_content_load_start(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_first_contentful_pixel,
+ "data": dashboard.get_page_metrics_avg_first_contentful_pixel(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_visited_pages,
+ "data": dashboard.get_user_activity_avg_visited_pages(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_session_duration,
+ "data": dashboard.get_user_activity_avg_session_duration(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime,
+ "data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_pages_response_time,
+ "data": dashboard.get_pages_response_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_response_time,
+ "data": dashboard.get_top_metrics_avg_response_time(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_first_paint,
+ "data": dashboard.get_top_metrics_avg_first_paint(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_dom_content_loaded,
+ "data": dashboard.get_top_metrics_avg_dom_content_loaded(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_till_first_bit,
+ "data": dashboard.get_top_metrics_avg_till_first_bit(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_time_to_interactive,
+ "data": dashboard.get_top_metrics_avg_time_to_interactive(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.count_requests,
+ "data": dashboard.get_top_metrics_count_requests(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_time_to_render,
+ "data": dashboard.get_time_to_render(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_used_js_heap_size,
+ "data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_cpu,
+ "data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())},
+ {"key": schemas.TemplatePredefinedKeys.avg_fps,
+ "data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
+ ]
+ results = sorted(results, key=lambda r: r["key"])
+ return {"data": results}
diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py
new file mode 100644
index 000000000..0a806b146
--- /dev/null
+++ b/api/routers/subs/metrics.py
@@ -0,0 +1,181 @@
+from fastapi import Body, Depends
+
+import schemas
+from chalicelib.core import dashboards2, custom_metrics
+from or_dependencies import OR_context
+from routers.base import get_routers
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.post('/{projectId}/dashboards', tags=["dashboard"])
+@app.put('/{projectId}/dashboards', tags=["dashboard"])
+def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return dashboards2.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
+
+
+@app.get('/{projectId}/dashboards', tags=["dashboard"])
+def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.get_dashboards(project_id=projectId, user_id=context.user_id)}
+
+
+@app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ data = dashboards2.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
+ if data is None:
+ return {"errors": ["dashboard not found"]}
+ return {"data": data}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.update_dashboard(project_id=projectId, user_id=context.user_id,
+ dashboard_id=dashboardId, data=data)}
+
+
+@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+def delete_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return dashboards2.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
+
+
+@app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"])
+def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+def add_widget_to_dashboard(projectId: int, dashboardId: int,
+ data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+ data=data)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
+ data: schemas.CreateCustomMetricsSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
+ dashboard_id=dashboardId, data=data)}
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
+ data: schemas.UpdateWidgetPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return dashboards2.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+ widget_id=widgetId, data=data)
+
+
+@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int,
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return dashboards2.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+ widget_id=widgetId)
+
+
+@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}/chart', tags=["dashboard"])
+def get_widget_chart(projectId: int, dashboardId: int, widgetId: int,
+ data: schemas.CustomMetricChartPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ data = dashboards2.make_chart_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
+ widget_id=widgetId, data=data)
+ if data is None:
+ return {"errors": ["widget not found"]}
+ return {"data": data}
+
+
+@app.get('/{projectId}/metrics/templates', tags=["dashboard"])
+def get_templates(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": dashboards2.get_templates(project_id=projectId, user_id=context.user_id)}
+
+
+@app.post('/{projectId}/metrics/try', tags=["dashboard"])
+@app.put('/{projectId}/metrics/try', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": custom_metrics.merged_live(project_id=projectId, data=data)}
+
+
+@app.post('/{projectId}/metrics', tags=["dashboard"])
+@app.put('/{projectId}/metrics', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
+def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
+
+
+@app.get('/{projectId}/metrics', tags=["dashboard"])
+@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
+def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
+
+
+@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
+ data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
+ if data is None:
+ return {"errors": ["custom metric not found"]}
+ return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
+def get_custom_metric_sessions(projectId: int, metric_id: int,
+ data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+ if data is None:
+ return {"errors": ["custom metric not found"]}
+ return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
+def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ data = dashboards2.make_chart_metrics(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+ data=data)
+ if data is None:
+ return {"errors": ["custom metric not found"]}
+ return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+ if data is None:
+ return {"errors": ["custom metric not found"]}
+ return {"data": data}
+
+
+@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+def update_custom_metric_state(projectId: int, metric_id: int,
+ data: schemas.UpdateCustomMetricsStatusSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {
+ "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+ status=data.active)}
+
+
+@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
diff --git a/api/routers/app/v1_api.py b/api/routers/subs/v1_api.py
similarity index 100%
rename from api/routers/app/v1_api.py
rename to api/routers/subs/v1_api.py
diff --git a/api/schemas.py b/api/schemas.py
index 77cb78c05..b01553cc8 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -776,6 +776,7 @@ class CustomMetricCreateSeriesSchema(BaseModel):
class MetricTimeseriesViewType(str, Enum):
line_chart = "lineChart"
progress = "progress"
+ area_chart = "areaChart"
class MetricTableViewType(str, Enum):
@@ -803,8 +804,8 @@ class TimeseriesMetricOfType(str, Enum):
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch):
- startDate: int = Field(TimeUTC.now(-7))
- endDate: int = Field(TimeUTC.now())
+ startTimestamp: int = Field(TimeUTC.now(-7))
+ endTimestamp: int = Field(TimeUTC.now())
class Config:
alias_generator = attribute_to_camel_case
@@ -817,10 +818,10 @@ class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema):
alias_generator = attribute_to_camel_case
-class CreateCustomMetricsSchema(CustomMetricChartPayloadSchema):
+class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
name: str = Field(...)
- series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
- is_public: bool = Field(default=True, const=True)
+ series: List[CustomMetricCreateSeriesSchema] = Field(...)
+ is_public: bool = Field(default=True)
view_type: Union[MetricTimeseriesViewType, MetricTableViewType] = Field(MetricTimeseriesViewType.line_chart)
metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
@@ -858,6 +859,10 @@ class CreateCustomMetricsSchema(CustomMetricChartPayloadSchema):
alias_generator = attribute_to_camel_case
+class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
+ series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
+
+
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
series_id: Optional[int] = Field(None)
@@ -875,3 +880,99 @@ class UpdateCustomMetricsStatusSchema(BaseModel):
class SavedSearchSchema(FunnelSchema):
filter: FlatSessionsSearchPayloadSchema = Field([])
+
+
+class CreateDashboardSchema(BaseModel):
+ name: str = Field(..., min_length=1)
+ is_public: bool = Field(default=False)
+ is_pinned: bool = Field(default=False)
+ metrics: Optional[List[int]] = Field(default=[])
+
+ class Config:
+ alias_generator = attribute_to_camel_case
+
+
+class EditDashboardSchema(CreateDashboardSchema):
+ is_public: Optional[bool] = Field(default=None)
+ is_pinned: Optional[bool] = Field(default=None)
+
+
+class UpdateWidgetPayloadSchema(BaseModel):
+ config: dict = Field(default={})
+
+ class Config:
+ alias_generator = attribute_to_camel_case
+
+
+class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema):
+ metric_id: int = Field(...)
+
+ class Config:
+ alias_generator = attribute_to_camel_case
+
+
+# these values should match the keys in metrics table
+class TemplatePredefinedKeys(str, Enum):
+ count_sessions = "count_sessions"
+ avg_request_load_time = "avg_request_load_time"
+ avg_page_load_time = "avg_page_load_time"
+ avg_image_load_time = "avg_image_load_time"
+ avg_dom_content_load_start = "avg_dom_content_load_start"
+ avg_first_contentful_pixel = "avg_first_contentful_pixel"
+ avg_visited_pages = "avg_visited_pages"
+ avg_session_duration = "avg_session_duration"
+ avg_pages_dom_buildtime = "avg_pages_dom_buildtime"
+ avg_pages_response_time = "avg_pages_response_time"
+ avg_response_time = "avg_response_time"
+ avg_first_paint = "avg_first_paint"
+ avg_dom_content_loaded = "avg_dom_content_loaded"
+ avg_till_first_bit = "avg_till_first_byte"
+ avg_time_to_interactive = "avg_time_to_interactive"
+ count_requests = "count_requests"
+ avg_time_to_render = "avg_time_to_render"
+ avg_used_js_heap_size = "avg_used_js_heap_size"
+ avg_cpu = "avg_cpu"
+ avg_fps = "avg_fps"
+ impacted_sessions_by_js_errors = "impacted_sessions_by_js_errors"
+ domains_errors_4xx = "domains_errors_4xx"
+ domains_errors_5xx = "domains_errors_5xx"
+ errors_per_domains = "errors_per_domains"
+ calls_errors = "calls_errors"
+ errors_by_type = "errors_per_type"
+ errors_by_origin = "resources_by_party"
+ speed_index_by_location = "speed_location"
+ slowest_domains = "slowest_domains"
+ sessions_per_browser = "sessions_per_browser"
+ time_to_render = "time_to_render"
+ impacted_sessions_by_slow_pages = "impacted_sessions_by_slow_pages"
+ memory_consumption = "memory_consumption"
+ cpu_load = "cpu"
+ frame_rate = "fps"
+ crashes = "crashes"
+ resources_vs_visually_complete = "resources_vs_visually_complete"
+ pages_dom_buildtime = "pages_dom_buildtime"
+ pages_response_time = "pages_response_time"
+ pages_response_time_distribution = "pages_response_time_distribution"
+ missing_resources = "missing_resources"
+ slowest_resources = "slowest_resources"
+ resources_fetch_time = "resources_loading_time"
+ resource_type_vs_response_end = "resource_type_vs_response_end"
+ resources_count_by_type = "resources_count_by_type"
+
+
+class TemplatePredefinedUnits(str, Enum):
+ millisecond = "ms"
+ minute = "min"
+ memory = "mb"
+ frame = "f/s"
+ percentage = "%"
+ count = "count"
+
+
+class CustomMetricAndTemplate(BaseModel):
+ is_template: bool = Field(...)
+ project_id: Optional[int] = Field(...)
+ predefined_key: Optional[TemplatePredefinedKeys] = Field(...)
+
+ class Config:
+ alias_generator = attribute_to_camel_case
diff --git a/ee/api/.env.default b/ee/api/.env.default
index 28f46f273..778a8f32c 100644
--- a/ee/api/.env.default
+++ b/ee/api/.env.default
@@ -46,6 +46,8 @@ pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=45
+PG_RETRY_MAX=50
+PG_RETRY_INTERVAL=2
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index f1ff9550b..488fab072 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -180,9 +180,6 @@ Pipfile
/chalicelib/core/alerts.py
/chalicelib/core/alerts_processor.py
/chalicelib/core/announcements.py
-/chalicelib/blueprints/bp_app_api.py
-/chalicelib/blueprints/bp_core.py
-/chalicelib/blueprints/bp_core_crons.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/errors_favorite_viewed.py
/chalicelib/core/events.py
@@ -237,7 +234,6 @@ Pipfile
/chalicelib/utils/smtp.py
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
-/chalicelib/blueprints/app/__init__.py
/routers/app/__init__.py
/routers/crons/__init__.py
/routers/subs/__init__.py
@@ -245,7 +241,6 @@ Pipfile
/chalicelib/core/assist.py
/auth/auth_apikey.py
/auth/auth_jwt.py
-/chalicelib/blueprints/subs/bp_insights.py
/build.sh
/routers/core.py
/routers/crons/core_crons.py
@@ -257,10 +252,11 @@ Pipfile
/chalicelib/core/heatmaps.py
/routers/subs/insights.py
/schemas.py
-/chalicelib/blueprints/app/v1_api.py
-/routers/app/v1_api.py
/chalicelib/core/custom_metrics.py
/chalicelib/core/performance_event.py
/chalicelib/core/saved_search.py
/app_alerts.py
/build_alerts.sh
+/routers/subs/metrics.py
+/routers/subs/v1_api.py
+/chalicelib/core/dashboards2.py
diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile
index cca6e6806..ee88ee22c 100644
--- a/ee/api/Dockerfile
+++ b/ee/api/Dockerfile
@@ -6,6 +6,7 @@ WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env
+ENV APP_NAME chalice
# Add Tini
# Startup daemon
diff --git a/ee/api/Dockerfile.alerts b/ee/api/Dockerfile.alerts
index 9be6ebc93..230514918 100644
--- a/ee/api/Dockerfile.alerts
+++ b/ee/api/Dockerfile.alerts
@@ -7,6 +7,7 @@ COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
ENV pg_minconn 2
+ENV APP_NAME alerts
# Add Tini
# Startup daemon
diff --git a/ee/api/_clickhouse_upgrade.sh b/ee/api/_clickhouse_upgrade.sh
deleted file mode 100644
index 9b656a584..000000000
--- a/ee/api/_clickhouse_upgrade.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-sudo yum update
-sudo yum install yum-utils
-sudo rpm --import https://repo.clickhouse.com/CLICKHOUSE-KEY.GPG
-sudo yum-config-manager --add-repo https://repo.clickhouse.com/rpm/stable/x86_64
-sudo yum update
-sudo service clickhouse-server restart
-
-
-#later mus use in clickhouse-client:
-#SET allow_experimental_window_functions = 1;
\ No newline at end of file
diff --git a/ee/api/app.py b/ee/api/app.py
index fdf7f60b8..ed2c01aa4 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -11,10 +11,10 @@ from starlette.responses import StreamingResponse, JSONResponse
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic, ee, saml
-from routers.app import v1_api, v1_api_ee
+from routers.subs import v1_api
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
-from routers.subs import dashboard
+from routers.subs import dashboard, insights, v1_api_ee
app = FastAPI()
@@ -65,7 +65,7 @@ app.include_router(saml.public_app)
app.include_router(saml.app)
app.include_router(saml.app_apikey)
app.include_router(dashboard.app)
-# app.include_router(insights.app)
+app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
app.include_router(v1_api_ee.app_apikey)
diff --git a/ee/api/chalicelib/core/dashboard.py b/ee/api/chalicelib/core/dashboard.py
index c5c373c78..c36c877da 100644
--- a/ee/api/chalicelib/core/dashboard.py
+++ b/ee/api/chalicelib/core/dashboard.py
@@ -169,7 +169,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
ch_query = f"""\
SELECT
toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(sessions.session_id) AS count
+ COUNT(sessions.session_id) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -181,19 +181,17 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
rows = ch.execute(query=ch_query, params=params)
results = {
- "count": sum([r["count"] for r in rows]),
+ "value": sum([r["value"] for r in rows]),
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp,
density=density,
- neutral={"count": 0})
+ neutral={"value": 0})
}
diff = endTimestamp - startTimestamp
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
- ch_query = f"""\
- SELECT
- COUNT(sessions.session_id) AS count
+ ch_query = f""" SELECT COUNT(sessions.session_id) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@@ -203,7 +201,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
count = count[0]["count"]
- results["countProgress"] = helper.__progress(old_val=count, new_val=results["count"])
+ results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
return results
@@ -222,9 +220,8 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta
with ch_client.ClickHouseClient() as ch:
ch_query = f"""\
- SELECT
- toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(DISTINCT errors.session_id) AS count
+ SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
+ COUNT(DISTINCT errors.session_id) AS count
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -304,9 +301,8 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
errors = {}
for error_id in error_ids:
ch_query = f"""\
- SELECT
- toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(errors.session_id) AS count
+ SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
+ COUNT(errors.session_id) AS count
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -348,10 +344,8 @@ def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query += meta_condition
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
ch_query = f"""\
- SELECT
- COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
--- COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_start ,0)),0) AS avg_dom_content_load_start,
- COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
+ SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
+ COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS avg_first_contentful_pixel
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@@ -395,8 +389,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
ch_sub_query += meta_condition
ch_sub_query.append("resources.type= %(type)s")
ch_query = f"""\
- SELECT
- AVG(NULLIF(resources.duration,0)) AS avg
+ SELECT AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
row = ch.execute(query=ch_query,
@@ -443,9 +436,8 @@ def __get_user_activity(cur, project_id, startTimestamp, endTimestamp, **args):
ch_sub_query += meta_condition
ch_query = f"""\
- SELECT
- COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
- COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
+ SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
+ COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS avg_session_duration
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@@ -464,47 +456,56 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query.append("resources.type = 'img'")
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
ch_sub_query_chart.append("resources.type = 'img'")
- ch_sub_query_chart.append("resources.url = %(url)s")
+ ch_sub_query_chart.append("resources.url IN %(url)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
- AVG(NULLIF(resources.duration,0)) AS avg,
- COUNT(resources.session_id) AS count
+ AVG(NULLIF(resources.duration,0)) AS avg,
+ COUNT(resources.session_id) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
-
- rows = ch.execute(query=ch_query,
- params={"project_id": project_id, "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp, **__get_constraint_values(args)})
+ params = {"project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+ # print(ch.client().substitute_params(ch_query, params))
+ rows = ch.execute(query=ch_query, params=params)
rows = [{"url": i["url"], "avgDuration": i["avg"], "sessions": i["count"]} for i in rows]
-
+ if len(rows) == 0:
+ return []
urls = [row["url"] for row in rows]
charts = {}
+ ch_query = f"""\
+ SELECT url,
+ toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS avg
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query_chart)}
+ GROUP BY url, timestamp
+ ORDER BY url, timestamp;"""
+ params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, "url": urls, **__get_constraint_values(args)}
+ # print(ch.client().substitute_params(ch_query, params))
+ u_rows = ch.execute(query=ch_query, params=params)
for url in urls:
- ch_query = f"""\
- SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(NULLIF(resources.duration,0)) AS avg
- FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
- WHERE {" AND ".join(ch_sub_query_chart)}
- GROUP BY timestamp
- ORDER BY timestamp;"""
- params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp, "url": url, **__get_constraint_values(args)}
- r = ch.execute(query=ch_query, params=params)
+ sub_rows = []
+ for r in u_rows:
+ if r["url"] == url:
+ sub_rows.append(r)
+ elif len(sub_rows) > 0:
+ break
charts[url] = [{"timestamp": int(i["timestamp"]),
"avgDuration": i["avg"]}
- for i in __complete_missing_steps(rows=r, start_time=startTimestamp,
+ for i in __complete_missing_steps(rows=sub_rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"avg": 0})]
for i in range(len(rows)):
rows[i] = helper.dict_to_camel_case(rows[i])
- rows[i]["chart"] = [helper.dict_to_camel_case(chart) for chart in charts[rows[i]["url"]]]
+ rows[i]["chart"] = helper.list_to_camel_case(charts[rows[i]["url"]])
return sorted(rows, key=lambda k: k["sessions"], reverse=True)
@@ -544,9 +545,8 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(NULLIF(resources.duration,0)) AS avg
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'img'
@@ -558,9 +558,8 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"avg": 0})]
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(NULLIF(resources.duration,0)) AS avg
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
AND resources.type = 'fetch'
@@ -577,9 +576,8 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
data=args)
ch_sub_query_chart += meta_condition
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(NULLIF(pages.load_event_end ,0)) AS avg
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(pages.load_event_end ,0)) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
@@ -648,9 +646,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
if resource_type == "ALL" and not pages_only and not events_only:
ch_sub_query.append("positionUTF8(url_hostpath,%(value)s)!=0")
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value,
- type AS key
+ ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value,
+ type AS key
FROM resources
WHERE {" AND ".join(ch_sub_query)}
GROUP BY type
@@ -685,9 +682,8 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
ch_sub_query.append(f"resources.type = '{__get_resource_db_type_from_type(resource_type)}'")
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- DISTINCT url_hostpath AS value,
- %(resource_type)s AS key
+ ch_query = f"""SELECT DISTINCT url_hostpath AS value,
+ %(resource_type)s AS key
FROM resources
WHERE {" AND ".join(ch_sub_query)}
LIMIT 10;"""
@@ -787,34 +783,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
return [helper.dict_to_camel_case(row) for row in rows]
-# def frustration_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
-# endTimestamp=TimeUTC.now(), **args):
-# with pg_client.PostgresClient() as cur:
-# sub_q = ""
-# if platform == 'mobile':
-# sub_q = "AND s.user_device_type = 'mobile' AND s.project_id = %(project_id)s AND s.start_ts >= %(startTimestamp)s AND s.start_ts < %(endTimestamp)s"
-# elif platform == 'desktop':
-# sub_q = "AND s.user_device_type = 'desktop' AND s.project_id = %(project_id)s AND s.start_ts >= %(startTimestamp)s AND s.start_ts < %(endTimestamp)s"
-#
-# cur.execute(cur.mogrify(f"""\
-# SELECT s.project_id,
-# s.session_id::text AS session_id,
-# s.*
-# FROM public.sessions AS s
-# LEFT JOIN public.session_watchdogs AS sw ON s.session_id=sw.session_id
-# LEFT JOIN public.watchdogs AS w ON w.watchdog_id=sw.watchdog_id
-# WHERE s.project_id = %(project_id)s
-# AND w.type='clickrage'
-# AND s.start_ts>=%(startTimestamp)s
-# AND s.start_ts<=%(endTimestamp)s
-# {sub_q}
-# ORDER BY s.session_id DESC
-# LIMIT 5;""",
-# {"project_id": project_id, "startTimestamp": startTimestamp,
-# "endTimestamp": endTimestamp}))
-# return helper.list_to_camel_case(cur.fetchall())
-
-
def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
density=7, **args):
@@ -826,9 +794,8 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- resources.url_hostpath AS key,
- COUNT(resources.session_id) AS doc_count
+ ch_query = f"""SELECT resources.url_hostpath AS key,
+ COUNT(resources.session_id) AS doc_count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY url_hostpath
@@ -841,10 +808,9 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
if len(rows) == 0:
return []
ch_sub_query.append("resources.url_hostpath = %(value)s")
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- COUNT(resources.session_id) AS doc_count,
- toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ COUNT(resources.session_id) AS doc_count,
+ toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY timestamp
@@ -879,9 +845,8 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- resources.url_hostpath, COUNT(resources.session_id) AS doc_count
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ resources.url_hostpath, COUNT(resources.session_id) AS doc_count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp, resources.url_hostpath
@@ -935,9 +900,8 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(NULLIF(resources.duration,0)) AS avg
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -969,9 +933,8 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(pages.dom_building_time) AS avg
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(pages.dom_building_time) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -985,10 +948,10 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avg": avg,
+ return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
- density=density, neutral={"avg": 0})}
+ density=density, neutral={"value": 0})}
def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1009,54 +972,50 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query_chart.append("isNotNull(resources.duration)")
ch_sub_query_chart.append("resources.duration>0")
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- splitByChar('/', resources.url_hostpath)[-1] AS name,
- AVG(NULLIF(resources.duration,0)) AS avg
+ ch_query = f"""SELECT any(url) AS url, any(type) AS type,
+ splitByChar('/', resources.url_hostpath)[-1] AS name,
+ AVG(NULLIF(resources.duration,0)) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY name
ORDER BY avg DESC
LIMIT 10;"""
- rows = ch.execute(query=ch_query,
- params={"project_id": project_id,
- "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp, **__get_constraint_values(args)})
- ch_sub_query_chart.append("endsWith(resources.url_hostpath, %(url)s)>0")
+ params = {"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, **__get_constraint_values(args)}
+ print(ch.format(query=ch_query, params=params))
+ rows = ch.execute(query=ch_query, params=params)
+
ch_sub_query.append(ch_sub_query_chart[-1])
results = []
+ names = {f"name_{i}": r["name"] for i, r in enumerate(rows)}
+ ch_query = f"""SELECT splitByChar('/', resources.url_hostpath)[-1] AS name,
+ toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(resources.duration) AS avg
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query_chart)}
+ AND ({" OR ".join([f"endsWith(resources.url_hostpath, %(name_{i})s)>0" for i in range(len(names.keys()))])})
+ GROUP BY name,timestamp
+ ORDER BY name,timestamp;"""
+ params = {"step_size": step_size, "project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ **names, **__get_constraint_values(args)}
+ # print(ch.format(query=ch_query, params=params))
+ charts = ch.execute(query=ch_query, params=params)
for r in rows:
- # if isinstance(r["url"], bytes):
- # try:
- # r["url"] = r["url"].decode("utf-8")
- # except UnicodeDecodeError:
- # continue
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
- AVG(resources.duration) AS avg
- FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
- WHERE {" AND ".join(ch_sub_query_chart)}
- GROUP BY timestamp
- ORDER BY timestamp;"""
- chart = ch.execute(query=ch_query,
- params={"step_size": step_size, "project_id": project_id,
- "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp,
- "url": r["name"], **__get_constraint_values(args)})
- r["chart"] = __complete_missing_steps(rows=chart, start_time=startTimestamp,
+ sub_chart = []
+ for c in charts:
+ if c["name"] == r["name"]:
+ cc = dict(c)
+ cc.pop("name")
+ sub_chart.append(cc)
+ elif len(sub_chart) > 0:
+ break
+ r["chart"] = __complete_missing_steps(rows=sub_chart, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"avg": 0})
- ch_query = f"""SELECT url, type
- FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
- WHERE {" AND ".join(ch_sub_query)}
- ORDER BY duration DESC
- LIMIT 1;"""
- url = ch.execute(query=ch_query,
- params={"project_id": project_id,
- "startTimestamp": startTimestamp,
- "endTimestamp": endTimestamp,
- "url": r["name"], **__get_constraint_values(args)})
- r["url"] = url[0]["url"]
- r["type"] = __get_resource_type_from_db_type(url[0]["type"])
+ r["type"] = __get_resource_type_from_db_type(r["type"])
results.append(r)
return results
@@ -1119,7 +1078,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
ch_sub_query_chart.append(f"url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(pages.response_time) AS avg
+ AVG(pages.response_time) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1134,10 +1093,10 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avg": avg,
+ return {"value": avg,
"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
- density=density, neutral={"avg": 0})}
+ density=density, neutral={"value": 0})}
def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1268,9 +1227,8 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- intDiv(toHour(sessions.datetime),2)*2 AS hour,
- COUNT(sessions.session_id) AS count
+ ch_query = f"""SELECT intDiv(toHour(sessions.datetime),2)*2 AS hour,
+ COUNT(sessions.session_id) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY hour
@@ -1320,7 +1278,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(pages.visually_complete) AS avg
+ AVG(pages.visually_complete) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1334,9 +1292,9 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avg": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
- end_time=endTimestamp, density=density,
- neutral={"avg": 0})}
+ return {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
+ end_time=endTimestamp, density=density,
+ neutral={"value": 0})}
def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1353,7 +1311,7 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(DISTINCT pages.session_id) AS count
+ COUNT(DISTINCT pages.session_id) AS count
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
AND (pages.response_time)>(SELECT AVG(pages.response_time)
@@ -1382,7 +1340,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(performance.avg_used_js_heap_size) AS avg_used_js_heap_size
+ AVG(performance.avg_used_js_heap_size) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1396,11 +1354,11 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avgUsedJsHeapSize": avg,
+ return {"value": avg,
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
- neutral={"avg_used_js_heap_size": 0}))}
+ neutral={"value": 0}))}
def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1413,7 +1371,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(performance.avg_cpu) AS avg_cpu
+ AVG(performance.avg_cpu) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1427,11 +1385,11 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avgCpu": avg,
+ return {"value": avg,
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
- neutral={"avg_cpu": 0}))}
+ neutral={"value": 0}))}
def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1444,7 +1402,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(performance.avg_fps) AS avg_fps
+ AVG(performance.avg_fps) AS value
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1458,11 +1416,11 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM performance {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
- return {"avgFps": avg,
+ return {"value": avg,
"chart": helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
- neutral={"avg_fps": 0}))}
+ neutral={"value": 0}))}
def __get_crashed_sessions_ids(project_id, startTimestamp, endTimestamp):
@@ -1698,9 +1656,8 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- resources.url_host AS domain,
- AVG(resources.duration) AS avg
+ ch_query = f"""SELECT resources.url_host AS domain,
+ AVG(resources.duration) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.url_host
@@ -1747,15 +1704,13 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- b.user_browser AS browser,
- b.count,
- groupArray([bv.user_browser_version, toString(bv.count)]) AS versions
+ ch_query = f"""SELECT b.user_browser AS browser,
+ b.count,
+ groupArray([bv.user_browser_version, toString(bv.count)]) AS versions
FROM
(
- SELECT
- sessions.user_browser,
- COUNT(sessions.session_id) AS count
+ SELECT sessions.user_browser,
+ COUNT(sessions.session_id) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY sessions.user_browser
@@ -1764,10 +1719,9 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
) AS b
INNER JOIN
(
- SELECT
- sessions.user_browser,
- sessions.user_browser_version,
- COUNT(sessions.session_id) AS count
+ SELECT sessions.user_browser,
+ sessions.user_browser_version,
+ COUNT(sessions.session_id) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY
@@ -1934,8 +1888,8 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(resources.session_id) AS total,
- SUM(if(resources.type='fetch',1,0)) AS xhr
+ COUNT(resources.session_id) AS total,
+ SUM(if(resources.type='fetch',1,0)) AS xhr
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -1946,7 +1900,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
density=density,
neutral={"total": 0, "xhr": 0})
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- AVG(pages.response_end) AS avg_response_end
+ AVG(pages.response_end) AS avg_response_end
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart_response_end)}
GROUP BY timestamp
@@ -1969,8 +1923,8 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
- COUNT(DISTINCT errors.session_id) AS sessions_count,
- COUNT(DISTINCT errors.error_id) AS errors_count
+ COUNT(DISTINCT errors.session_id) AS sessions_count,
+ COUNT(DISTINCT errors.error_id) AS errors_count
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@@ -2008,15 +1962,13 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
- ch_query = f"""SELECT
- toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp,
- AVG(NULLIF(s.count,0)) AS avg,
- groupArray([toString(t.type), toString(t.xavg)]) AS types
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp,
+ AVG(NULLIF(s.count,0)) AS avg,
+ groupArray([toString(t.type), toString(t.xavg)]) AS types
FROM
- ( SELECT
- resources.session_id,
- MIN(resources.datetime) AS base_datetime,
- COUNT(resources.url) AS count
+ ( SELECT resources.session_id,
+ MIN(resources.datetime) AS base_datetime,
+ COUNT(resources.url) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY resources.session_id
@@ -2137,3 +2089,490 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
density=density,
neutral={"first_party": 0,
"third_party": 0}))
+
+
+def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ with ch_client.ClickHouseClient() as ch:
+ row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ results = helper.dict_to_camel_case(row)
+ results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ row = __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ previous = helper.dict_to_camel_case(row)
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ ch_query = f"""\
+ SELECT AVG(NULLIF(pages.load_event_end ,0)) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+ **__get_constraint_values(args)}
+ row = ch.execute(query=ch_query, params=params)[0]
+ result = row
+ for k in result:
+ if result[k] is None:
+ result[k] = 0
+ return result
+
+
+def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(),
+ density=19, resources=None, **args):
+ step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
+ location_constraints = []
+ meta_condition = __get_meta_constraint(args)
+
+ location_constraints_vals = {}
+
+ if resources and len(resources) > 0:
+ for r in resources:
+ if r["type"] == "LOCATION":
+ location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s")
+ location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value']
+
+ params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp}
+ with ch_client.ClickHouseClient() as ch:
+ ch_sub_query_chart = __get_basic_constraints(table_name="pages", round_start=True,
+ data=args)
+ ch_sub_query_chart += meta_condition
+
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(pages.load_event_end ,0)) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query_chart)}
+ {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
+ GROUP BY timestamp
+ ORDER BY timestamp;"""
+
+ rows = ch.execute(query=ch_query,
+ params={**params, **location_constraints_vals, **__get_constraint_values(args)})
+ pages = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+ __complete_missing_steps(rows=rows, start_time=startTimestamp,
+ end_time=endTimestamp,
+ density=density, neutral={"value": 0})]
+
+ for s in pages:
+ for k in s:
+ if s[k] is None:
+ s[k] = 0
+ return pages
+
+
+def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ with ch_client.ClickHouseClient() as ch:
+ row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ results = helper.dict_to_camel_case(row)
+ results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ row = __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ previous = helper.dict_to_camel_case(row)
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_application_activity_avg_image_load_time(ch, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+ ch_sub_query.append("resources.type= %(type)s")
+ ch_query = f"""\
+ SELECT AVG(NULLIF(resources.duration,0)) AS value
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ row = ch.execute(query=ch_query,
+ params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]
+ result = row
+ for k in result:
+ if result[k] is None:
+ result[k] = 0
+ return result
+
+
+def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(),
+ density=19, resources=None, **args):
+ step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
+ img_constraints = []
+ ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query_chart += meta_condition
+
+ img_constraints_vals = {}
+
+ if resources and len(resources) > 0:
+ for r in resources:
+ if r["type"] == "IMG":
+ img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
+ img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
+
+ params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp}
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS value
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query_chart)}
+ AND resources.type = 'img'
+ {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
+ GROUP BY timestamp
+ ORDER BY timestamp;"""
+ rows = ch.execute(query=ch_query, params={**params, **img_constraints_vals, **__get_constraint_values(args)})
+ images = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+ __complete_missing_steps(rows=rows, start_time=startTimestamp,
+ end_time=endTimestamp,
+ density=density, neutral={"value": 0})]
+
+ for s in images:
+ for k in s:
+ if s[k] is None:
+ s[k] = 0
+ return images
+
+
+def get_application_activity_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ with ch_client.ClickHouseClient() as ch:
+ row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ results = helper.dict_to_camel_case(row)
+ results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ row = __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args)
+ previous = helper.dict_to_camel_case(row)
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_application_activity_avg_request_load_time(ch, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+ ch_sub_query.append("resources.type= %(type)s")
+ ch_query = f"""\
+ SELECT AVG(NULLIF(resources.duration,0)) AS value
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ row = ch.execute(query=ch_query,
+ params={"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]
+ result = row
+ for k in result:
+ if result[k] is None:
+ result[k] = 0
+ return result
+
+
+def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(),
+ density=19, resources=None, **args):
+ step_size = __get_step_size(endTimestamp=endTimestamp, startTimestamp=startTimestamp, density=density)
+ request_constraints = []
+ ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query_chart += meta_condition
+
+ request_constraints_vals = {}
+
+ if resources and len(resources) > 0:
+ for r in resources:
+ if r["type"] != "IMG" and r["type"] == "LOCATION":
+ request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
+ request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
+ params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp}
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
+ AVG(NULLIF(resources.duration,0)) AS value
+ FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query_chart)}
+ AND resources.type = 'fetch'
+ {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
+ GROUP BY timestamp
+ ORDER BY timestamp;"""
+ rows = ch.execute(query=ch_query,
+ params={**params, **request_constraints_vals, **__get_constraint_values(args)})
+ requests = [{"timestamp": i["timestamp"], "value": i["value"]} for i in
+ __complete_missing_steps(rows=rows, start_time=startTimestamp,
+ end_time=endTimestamp, density=density,
+ neutral={"value": 0})]
+
+ for s in requests:
+ for k in s:
+ if s[k] is None:
+ s[k] = 0
+ return requests
+
+
+def get_page_metrics_avg_dom_content_load_start(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ with ch_client.ClickHouseClient() as ch:
+ rows = __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ results = helper.dict_to_camel_case(rows[0])
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ rows = __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ previous = helper.dict_to_camel_case(rows[0])
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+ ch_query = f"""\
+ SELECT COALESCE(AVG(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+ **__get_constraint_values(args)}
+ rows = ch.execute(query=ch_query, params=params)
+ return rows
+
+
+def get_page_metrics_avg_first_contentful_pixel(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ with ch_client.ClickHouseClient() as ch:
+ rows = __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ results = helper.dict_to_camel_case(rows[0])
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ rows = __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ previous = helper.dict_to_camel_case(rows[0])
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+    # "first contentful pixel" is computed from pages.first_contentful_paint (0 values excluded via NULLIF)
+ ch_query = f"""\
+ SELECT COALESCE(AVG(NULLIF(pages.first_contentful_paint,0)),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+ **__get_constraint_values(args)}
+ rows = ch.execute(query=ch_query, params=params)
+ return rows
+
+
+def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ results = {}
+
+ with ch_client.ClickHouseClient() as ch:
+ rows = __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ results = helper.dict_to_camel_case(rows[0])
+ for key in results:
+ if isnan(results[key]):
+ results[key] = 0
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ rows = __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTimestamp, **args)
+
+ if len(rows) > 0:
+ previous = helper.dict_to_camel_case(rows[0])
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_user_activity_avg_visited_pages(cur, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ ch_query = f"""\
+ SELECT COALESCE(CEIL(AVG(NULLIF(sessions.pages_count,0))),0) AS value
+ FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+ **__get_constraint_values(args)}
+
+ rows = cur.execute(query=ch_query, params=params)
+
+ return rows
+
+
+def get_user_activity_avg_session_duration(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), **args):
+ results = {}
+
+ with ch_client.ClickHouseClient() as ch:
+ rows = __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args)
+ if len(rows) > 0:
+ results = helper.dict_to_camel_case(rows[0])
+ for key in results:
+ if isnan(results[key]):
+ results[key] = 0
+ diff = endTimestamp - startTimestamp
+ endTimestamp = startTimestamp
+ startTimestamp = endTimestamp - diff
+ rows = __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, endTimestamp, **args)
+
+ if len(rows) > 0:
+ previous = helper.dict_to_camel_case(rows[0])
+ results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
+ return results
+
+
+def __get_user_activity_avg_session_duration(cur, project_id, startTimestamp, endTimestamp, **args):
+ ch_sub_query = __get_basic_constraints(table_name="sessions", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ ch_query = f"""\
+ SELECT COALESCE(AVG(NULLIF(sessions.duration,0)),0) AS value
+ FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
+ **__get_constraint_values(args)}
+
+ rows = cur.execute(query=ch_query, params=params)
+
+ return rows
+
+
+def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COALESCE(AVG(pages.response_time),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0;"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
+
+
+def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COUNT(pages.session_id) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)};"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
+
+
+def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COALESCE(AVG(pages.first_paint),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0;"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
+
+
+def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COALESCE(AVG(pages.dom_content_loaded_event_time),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0;"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
+
+
+def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COALESCE(AVG(pages.ttfb),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0;"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
+
+
+def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+ endTimestamp=TimeUTC.now(), value=None, **args):
+ ch_sub_query = __get_basic_constraints(table_name="pages", data=args)
+ meta_condition = __get_meta_constraint(args)
+ ch_sub_query += meta_condition
+
+ if value is not None:
+ ch_sub_query.append("pages.url_path = %(value)s")
+ with ch_client.ClickHouseClient() as ch:
+ ch_query = f"""SELECT COALESCE(AVG(pages.time_to_interactive),0) AS value
+ FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
+ WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0;"""
+ rows = ch.execute(query=ch_query,
+ params={"project_id": project_id,
+ "startTimestamp": startTimestamp,
+ "endTimestamp": endTimestamp,
+ "value": value, **__get_constraint_values(args)})
+ return helper.dict_to_camel_case(rows[0])
diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py
index 387029fd4..ff9d4dad4 100644
--- a/ee/api/chalicelib/core/insights.py
+++ b/ee/api/chalicelib/core/insights.py
@@ -1,9 +1,9 @@
-from chalicelib.core import sessions_metas
-from chalicelib.utils import helper, dev
-from chalicelib.utils import ch_client
-from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps
+import schemas
from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint
+from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps
+from chalicelib.utils import ch_client
+from chalicelib.utils import helper, dev
+from chalicelib.utils.TimeUTC import TimeUTC
def __transform_journey(rows):
@@ -42,7 +42,7 @@ def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=
elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append(f"sessions_metadata.project_id = %(project_id)s")
meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)")
@@ -303,7 +303,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.user_id IS NOT NULL")
meta_condition.append("not empty(sessions_metadata.user_id)")
@@ -404,7 +404,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70),
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.user_id IS NOT NULL")
meta_condition.append("not empty(sessions_metadata.user_id)")
@@ -512,7 +512,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.user_id IS NOT NULL")
meta_condition.append("not empty(sessions_metadata.user_id)")
@@ -586,7 +586,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.user_id IS NOT NULL")
meta_condition.append("not empty(sessions_metadata.user_id)")
@@ -672,7 +672,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("user_id IS NOT NULL")
meta_condition.append("not empty(sessions_metadata.user_id)")
@@ -742,7 +742,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.project_id = %(project_id)s")
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
@@ -807,7 +807,7 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.project_id = %(project_id)s")
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
@@ -847,7 +847,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime
for f in filters:
if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]:
period = f["value"]
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
period_function = PERIOD_TO_FUNCTION[period]
@@ -940,7 +940,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
- elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
+ elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
meta_condition.append("sessions_metadata.project_id = %(project_id)s")
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
@@ -1044,4 +1044,4 @@ def search(text, feature_type, project_id, platform=None):
rows = ch.execute(ch_query, params)
else:
return []
- return [helper.dict_to_camel_case(row) for row in rows]
\ No newline at end of file
+ return [helper.dict_to_camel_case(row) for row in rows]
diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py
index 0255c8c8c..3072f55a0 100644
--- a/ee/api/chalicelib/core/projects.py
+++ b/ee/api/chalicelib/core/projects.py
@@ -82,22 +82,22 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
rows = cur.fetchall()
if recording_state:
project_ids = [f'({r["project_id"]})' for r in rows]
- query = f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
- FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
- LEFT JOIN sessions USING (project_id)
- GROUP BY project_id;"""
- cur.execute(
- query=query
- )
+ query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
+ FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
+ LEFT JOIN sessions USING (project_id)
+ WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
+ GROUP BY project_id;""",
+ {"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
+
+ cur.execute(query=query)
status = cur.fetchall()
for r in rows:
+ r["status"] = "red"
for s in status:
if s["project_id"] == r["project_id"]:
- if s["last"] < TimeUTC.now(-2):
- r["status"] = "red"
- elif s["last"] < TimeUTC.now(-1):
+ if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1):
r["status"] = "yellow"
- else:
+ elif s["last"] >= TimeUTC.now(-1):
r["status"] = "green"
break
diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py
index babdd669a..aa45699f7 100644
--- a/ee/api/chalicelib/utils/ch_client.py
+++ b/ee/api/chalicelib/utils/ch_client.py
@@ -25,5 +25,8 @@ class ClickHouseClient:
def client(self):
return self.__client
+ def format(self, query, params):
+ return self.__client.substitute_params(query, params)
+
def __exit__(self, *args):
pass
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index 84a372567..5909d31c1 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -4,11 +4,11 @@ boto3==1.16.1
pyjwt==1.7.1
psycopg2-binary==2.8.6
elasticsearch==7.9.1
-jira==2.0.0
+jira==3.1.1
clickhouse-driver==0.2.2
python3-saml==1.12.0
-fastapi==0.74.1
+fastapi==0.75.0
python-multipart==0.0.5
uvicorn[standard]==0.17.5
python-decouple==3.6
diff --git a/ee/api/routers/app/v1_api_ee.py b/ee/api/routers/subs/v1_api_ee.py
similarity index 100%
rename from ee/api/routers/app/v1_api_ee.py
rename to ee/api/routers/subs/v1_api_ee.py
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.5/1.5.5.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.5/1.5.5.sql
new file mode 100644
index 000000000..8cafd767d
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.5/1.5.5.sql
@@ -0,0 +1,118 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.5.5-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+
+CREATE TABLE IF NOT EXISTS dashboards
+(
+ dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
+ name text NOT NULL,
+ is_public boolean NOT NULL DEFAULT TRUE,
+ is_pinned boolean NOT NULL DEFAULT FALSE,
+ created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
+ deleted_at timestamp NULL DEFAULT NULL
+);
+
+
+ALTER TABLE IF EXISTS metrics
+ DROP CONSTRAINT IF EXISTS null_project_id_for_template_only,
+ DROP CONSTRAINT IF EXISTS unique_key;
+
+ALTER TABLE IF EXISTS metrics
+ ADD COLUMN IF NOT EXISTS edited_at timestamp NULL DEFAULT NULL,
+ ADD COLUMN IF NOT EXISTS is_pinned boolean NOT NULL DEFAULT FALSE,
+ ADD COLUMN IF NOT EXISTS category text NULL DEFAULT 'custom',
+ ADD COLUMN IF NOT EXISTS is_predefined boolean NOT NULL DEFAULT FALSE,
+ ADD COLUMN IF NOT EXISTS is_template boolean NOT NULL DEFAULT FALSE,
+ ADD COLUMN IF NOT EXISTS predefined_key text NULL DEFAULT NULL,
+ ADD COLUMN IF NOT EXISTS default_config jsonb NOT NULL DEFAULT '{"col": 2,"row": 2,"position": 0}'::jsonb,
+ ALTER COLUMN project_id DROP NOT NULL,
+ ADD CONSTRAINT null_project_id_for_template_only
+ CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ),
+ ADD CONSTRAINT unique_key UNIQUE (predefined_key);
+
+
+
+CREATE TABLE IF NOT EXISTS dashboard_widgets
+(
+ widget_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ dashboard_id integer NOT NULL REFERENCES dashboards (dashboard_id) ON DELETE CASCADE,
+ metric_id integer NOT NULL REFERENCES metrics (metric_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
+ created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
+ config jsonb NOT NULL DEFAULT '{}'::jsonb
+);
+
+COMMIT;
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'areaChart';
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'barChart';
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'stackedBarChart';
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'stackedBarLineChart';
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'overview';
+ALTER TYPE metric_view_type ADD VALUE IF NOT EXISTS 'map';
+ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'predefined';
+
+
+INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type)
+VALUES ('Captured sessions', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_sessions', 'predefined', 'overview'),
+ ('Request Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
+ ('Page Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
+ ('Image Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
+ ('DOM Content Load Start', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
+ ('First Meaningful paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
+ ('No. of Visited Pages', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
+ ('Session Duration', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
+ ('DOM Build Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
+ ('Pages Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
+ ('Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
+ ('First Paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
+ ('DOM Content Loaded', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
+ ('Time Till First byte', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
+ ('Time To Interactive', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
+ ('Captured requests', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_requests', 'predefined', 'overview'),
+ ('Time To Render', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
+ ('Memory Consumption', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
+ ('CPU Load', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
+ ('Frame rate', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_fps', 'predefined', 'overview'),
+
+ ('Sessions Affected by JS Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
+ ('Top Domains with 4xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
+ ('Top Domains with 5xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
+ ('Errors per Domain', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
+ ('Fetch Calls with Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'calls_errors', 'predefined', 'table'),
+ ('Errors by Type', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
+ ('Errors by Origin', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
+
+ ('Speed Index by Location', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'speed_location', 'predefined', 'map'),
+ ('Slowest Domains', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'slowest_domains', 'predefined', 'table'),
+ ('Sessions per Browser', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
+ ('Time To Render', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
+ ('Sessions Impacted by Slow Pages', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
+ ('Memory Consumption', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
+ ('CPU Load', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'cpu', 'predefined', 'areaChart'),
+ ('Frame Rate', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'fps', 'predefined', 'areaChart'),
+ ('Crashes', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'crashes', 'predefined', 'areaChart'),
+ ('Resources Loaded vs Visually Complete', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
+ ('DOM Build Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
+ ('Pages Response Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
+ ('Pages Response Time Distribution', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
+
+ ('Missing Resources', 'resources', '{"col":4,"row":2,"position":0}', true, true, true, 'missing_resources', 'predefined', 'table'),
+ ('Slowest Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'slowest_resources', 'predefined', 'table'),
+ ('Resources Fetch Time', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
+ ('Resource Loaded vs Response End', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
+ ('Breakdown of Loaded Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
+ON CONFLICT (predefined_key) DO UPDATE
+ SET name=excluded.name,
+ category=excluded.category,
+ default_config=excluded.default_config,
+ is_predefined=excluded.is_predefined,
+ is_template=excluded.is_template,
+ is_public=excluded.is_public,
+ metric_type=excluded.metric_type,
+ view_type=excluded.view_type;
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 9adab50e0..5abd6f026 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
-SELECT 'v1.5.4-ee'
+SELECT 'v1.5.5-ee'
$$ LANGUAGE sql IMMUTABLE;
@@ -106,6 +106,8 @@ $$
('assigned_sessions'),
('autocomplete'),
('basic_authentication'),
+ ('dashboards'),
+ ('dashboard_widgets'),
('errors'),
('funnels'),
('integrations'),
@@ -786,23 +788,33 @@ $$
CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id);
CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id);
- CREATE TYPE metric_type AS ENUM ('timeseries','table');
- CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart');
+ CREATE TYPE metric_type AS ENUM ('timeseries','table', 'predefined');
+ CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart','areaChart','barChart','stackedBarChart','stackedBarLineChart','overview','map');
CREATE TABLE IF NOT EXISTS metrics
(
- metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
- user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
- name text NOT NULL,
- is_public boolean NOT NULL DEFAULT FALSE,
- active boolean NOT NULL DEFAULT TRUE,
- created_at timestamp DEFAULT timezone('utc'::text, now()) not null,
- deleted_at timestamp,
- metric_type metric_type NOT NULL DEFAULT 'timeseries',
- view_type metric_view_type NOT NULL DEFAULT 'lineChart',
- metric_of text NOT NULL DEFAULT 'sessionCount',
- metric_value text[] NOT NULL DEFAULT '{}'::text[],
- metric_format text
+ metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
+ name text NOT NULL,
+ is_public boolean NOT NULL DEFAULT FALSE,
+ active boolean NOT NULL DEFAULT TRUE,
+ created_at timestamp default timezone('utc'::text, now()) not null,
+ deleted_at timestamp,
+ edited_at timestamp,
+ metric_type metric_type NOT NULL DEFAULT 'timeseries',
+ view_type metric_view_type NOT NULL DEFAULT 'lineChart',
+ metric_of text NOT NULL DEFAULT 'sessionCount',
+ metric_value text[] NOT NULL DEFAULT '{}'::text[],
+ metric_format text,
+ category text NULL DEFAULT 'custom',
+ is_pinned boolean NOT NULL DEFAULT FALSE,
+ is_predefined boolean NOT NULL DEFAULT FALSE,
+ is_template boolean NOT NULL DEFAULT FALSE,
+ predefined_key text NULL DEFAULT NULL,
+ default_config jsonb NOT NULL DEFAULT '{"col": 2,"row": 2,"position": 0}'::jsonb,
+ CONSTRAINT null_project_id_for_template_only
+ CHECK ( (metrics.category != 'custom') != (metrics.project_id IS NOT NULL) ),
+ CONSTRAINT unique_key UNIQUE (predefined_key)
);
CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
CREATE TABLE IF NOT EXISTS metric_series
@@ -817,6 +829,29 @@ $$
);
CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id);
+
+ CREATE TABLE dashboards
+ (
+ dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
+ name text NOT NULL,
+ is_public boolean NOT NULL DEFAULT TRUE,
+ is_pinned boolean NOT NULL DEFAULT FALSE,
+ created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
+ deleted_at timestamp NULL DEFAULT NULL
+ );
+
+ CREATE TABLE dashboard_widgets
+ (
+ widget_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ dashboard_id integer NOT NULL REFERENCES dashboards (dashboard_id) ON DELETE CASCADE,
+ metric_id integer NOT NULL REFERENCES metrics (metric_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
+ created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
+ config jsonb NOT NULL DEFAULT '{}'::jsonb
+ );
+
CREATE TABLE IF NOT EXISTS searches
(
search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
@@ -948,10 +983,13 @@ $$
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR
- first_paint_time > 0 OR
- dom_content_loaded_time > 0 OR
+ first_paint_time >
+ 0 OR
+ dom_content_loaded_time >
+ 0 OR
ttfb > 0 OR
- time_to_interactive > 0;
+ time_to_interactive >
+ 0;
CREATE INDEX IF NOT EXISTS pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
CREATE INDEX IF NOT EXISTS pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
@@ -1219,5 +1257,63 @@ $$
$$
LANGUAGE plpgsql;
+INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type, view_type)
+VALUES ('Captured sessions', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_sessions', 'predefined', 'overview'),
+ ('Request Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_request_load_time', 'predefined', 'overview'),
+ ('Page Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_page_load_time', 'predefined', 'overview'),
+ ('Image Load Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_image_load_time', 'predefined', 'overview'),
+ ('DOM Content Load Start', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_load_start', 'predefined', 'overview'),
+ ('First Meaningful paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_contentful_pixel', 'predefined', 'overview'),
+ ('No. of Visited Pages', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_visited_pages', 'predefined', 'overview'),
+ ('Session Duration', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_session_duration', 'predefined', 'overview'),
+ ('DOM Build Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_dom_buildtime', 'predefined', 'overview'),
+ ('Pages Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_pages_response_time', 'predefined', 'overview'),
+ ('Response Time', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_response_time', 'predefined', 'overview'),
+ ('First Paint', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_first_paint', 'predefined', 'overview'),
+ ('DOM Content Loaded', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_dom_content_loaded', 'predefined', 'overview'),
+ ('Time Till First byte', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_till_first_byte', 'predefined', 'overview'),
+ ('Time To Interactive', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_interactive', 'predefined', 'overview'),
+ ('Captured requests', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'count_requests', 'predefined', 'overview'),
+ ('Time To Render', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_time_to_render', 'predefined', 'overview'),
+ ('Memory Consumption', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_used_js_heap_size', 'predefined', 'overview'),
+ ('CPU Load', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_cpu', 'predefined', 'overview'),
+ ('Frame rate', 'overview', '{"col":1,"row":1,"position":0}', true, true, true, 'avg_fps', 'predefined', 'overview'),
+
+ ('Sessions Affected by JS Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_js_errors', 'predefined', 'barChart'),
+ ('Top Domains with 4xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_4xx', 'predefined', 'lineChart'),
+ ('Top Domains with 5xx Fetch Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'domains_errors_5xx', 'predefined', 'lineChart'),
+ ('Errors per Domain', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
+ ('Fetch Calls with Errors', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'calls_errors', 'predefined', 'table'),
+ ('Errors by Type', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'errors_per_type', 'predefined', 'barChart'),
+ ('Errors by Origin', 'errors', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_by_party', 'predefined', 'stackedBarChart'),
+
+ ('Speed Index by Location', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'speed_location', 'predefined', 'map'),
+ ('Slowest Domains', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'slowest_domains', 'predefined', 'table'),
+ ('Sessions per Browser', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'sessions_per_browser', 'predefined', 'table'),
+ ('Time To Render', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'time_to_render', 'predefined', 'areaChart'),
+ ('Sessions Impacted by Slow Pages', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'impacted_sessions_by_slow_pages', 'predefined', 'areaChart'),
+ ('Memory Consumption', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'memory_consumption', 'predefined', 'areaChart'),
+ ('CPU Load', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'cpu', 'predefined', 'areaChart'),
+ ('Frame Rate', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'fps', 'predefined', 'areaChart'),
+ ('Crashes', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'crashes', 'predefined', 'areaChart'),
+ ('Resources Loaded vs Visually Complete', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_vs_visually_complete', 'predefined', 'areaChart'),
+ ('DOM Build Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_dom_buildtime', 'predefined', 'areaChart'),
+ ('Pages Response Time', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time', 'predefined', 'areaChart'),
+ ('Pages Response Time Distribution', 'performance', '{"col":2,"row":2,"position":0}', true, true, true, 'pages_response_time_distribution', 'predefined', 'barChart'),
+
+ ('Missing Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'missing_resources', 'predefined', 'table'),
+ ('Slowest Resources', 'resources', '{"col":4,"row":2,"position":0}', true, true, true, 'slowest_resources', 'predefined', 'table'),
+ ('Resources Fetch Time', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_loading_time', 'predefined', 'table'),
+ ('Resource Loaded vs Response End', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resource_type_vs_response_end', 'predefined', 'stackedBarLineChart'),
+ ('Breakdown of Loaded Resources', 'resources', '{"col":2,"row":2,"position":0}', true, true, true, 'resources_count_by_type', 'predefined', 'stackedBarChart')
+ON CONFLICT (predefined_key) DO UPDATE
+ SET name=excluded.name,
+ category=excluded.category,
+ default_config=excluded.default_config,
+ is_predefined=excluded.is_predefined,
+ is_template=excluded.is_template,
+ is_public=excluded.is_public,
+ metric_type=excluded.metric_type,
+ view_type=excluded.view_type;
COMMIT;
\ No newline at end of file
diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index c044043a5..998a457df 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -14,6 +14,7 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
+const SESSION_RECONNECTED = "SESSION_RECONNECTED";
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
@@ -309,6 +310,7 @@ module.exports = {
debug && console.log(`notifying new session about agent-existence`);
let agents_ids = await get_all_agents_ids(io, socket);
io.to(socket.id).emit(AGENTS_CONNECTED, agents_ids);
+ socket.to(socket.peerId).emit(SESSION_RECONNECTED, socket.id);
}
} else if (c_sessions <= 0) {
diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js
index 0bd397d96..256286351 100644
--- a/ee/utilities/servers/websocket.js
+++ b/ee/utilities/servers/websocket.js
@@ -12,6 +12,7 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
+const SESSION_RECONNECTED = "SESSION_RECONNECTED";
let io;
const debug = process.env.debug === "1" || false;
@@ -287,6 +288,7 @@ module.exports = {
debug && console.log(`notifying new session about agent-existence`);
let agents_ids = await get_all_agents_ids(io, socket);
io.to(socket.id).emit(AGENTS_CONNECTED, agents_ids);
+ socket.to(socket.peerId).emit(SESSION_RECONNECTED, socket.id);
}
} else if (c_sessions <= 0) {
diff --git a/frontend/app/Router.js b/frontend/app/Router.js
index 89fbdd343..f42f3c456 100644
--- a/frontend/app/Router.js
+++ b/frontend/app/Router.js
@@ -1,3 +1,4 @@
+import React, { lazy, Suspense } from 'react';
import { Switch, Route, Redirect } from 'react-router';
import { BrowserRouter, withRouter } from 'react-router-dom';
import { connect } from 'react-redux';
@@ -5,26 +6,29 @@ import { Notification } from 'UI';
import { Loader } from 'UI';
import { fetchUserInfo } from 'Duck/user';
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
-import Login from 'Components/Login/Login';
-import ForgotPassword from 'Components/ForgotPassword/ForgotPassword';
-import UpdatePassword from 'Components/UpdatePassword/UpdatePassword';
-import ClientPure from 'Components/Client/Client';
-import OnboardingPure from 'Components/Onboarding/Onboarding';
-import SessionPure from 'Components/Session/Session';
-import LiveSessionPure from 'Components/Session/LiveSession';
-import AssistPure from 'Components/Assist';
-import BugFinderPure from 'Components/BugFinder/BugFinder';
-import DashboardPure from 'Components/Dashboard/Dashboard';
-import ErrorsPure from 'Components/Errors/Errors';
+const Login = lazy(() => import('Components/Login/Login'));
+const ForgotPassword = lazy(() => import('Components/ForgotPassword/ForgotPassword'));
+const UpdatePassword = lazy(() => import('Components/UpdatePassword/UpdatePassword'));
+const SessionPure = lazy(() => import('Components/Session/Session'));
+const LiveSessionPure = lazy(() => import('Components/Session/LiveSession'));
+const OnboardingPure = lazy(() => import('Components/Onboarding/Onboarding'));
+const ClientPure = lazy(() => import('Components/Client/Client'));
+const AssistPure = lazy(() => import('Components/Assist'));
+const BugFinderPure = lazy(() => import('Components/BugFinder/BugFinder'));
+const DashboardPure = lazy(() => import('Components/Dashboard/NewDashboard'));
+const ErrorsPure = lazy(() => import('Components/Errors/Errors'));
+const FunnelDetails = lazy(() => import('Components/Funnels/FunnelDetails'));
+const FunnelIssueDetails = lazy(() => import('Components/Funnels/FunnelIssueDetails'));
+import WidgetViewPure from 'Components/Dashboard/components/WidgetView';
import Header from 'Components/Header/Header';
// import ResultsModal from 'Shared/Results/ResultsModal';
-import FunnelDetails from 'Components/Funnels/FunnelDetails';
-import FunnelIssueDetails from 'Components/Funnels/FunnelIssueDetails';
import { fetchList as fetchIntegrationVariables } from 'Duck/customField';
import { fetchList as fetchSiteList } from 'Duck/site';
import { fetchList as fetchAnnouncements } from 'Duck/announcements';
import { fetchList as fetchAlerts } from 'Duck/alerts';
import { fetchWatchdogStatus } from 'Duck/watchdogs';
+import { dashboardService } from "App/services";
+import { withStore } from 'App/mstore'
import APIClient from './api_client';
import * as routes from './routes';
@@ -32,9 +36,12 @@ import { OB_DEFAULT_TAB } from 'App/routes';
import Signup from './components/Signup/Signup';
import { fetchTenants } from 'Duck/user';
import { setSessionPath } from 'Duck/sessions';
+import { ModalProvider } from './components/Modal';
+import ModalRoot from './components/Modal/ModalRoot';
const BugFinder = withSiteIdUpdater(BugFinderPure);
const Dashboard = withSiteIdUpdater(DashboardPure);
+const WidgetView = withSiteIdUpdater(WidgetViewPure);
const Session = withSiteIdUpdater(SessionPure);
const LiveSession = withSiteIdUpdater(LiveSessionPure);
const Assist = withSiteIdUpdater(AssistPure);
@@ -46,7 +53,15 @@ const FunnelIssue = withSiteIdUpdater(FunnelIssueDetails);
const withSiteId = routes.withSiteId;
const withObTab = routes.withObTab;
+const METRICS_PATH = routes.metrics();
+const METRICS_DETAILS = routes.metricDetails();
+
const DASHBOARD_PATH = routes.dashboard();
+const DASHBOARD_SELECT_PATH = routes.dashboardSelected();
+const DASHBOARD_METRIC_CREATE_PATH = routes.dashboardMetricCreate();
+const DASHBOARD_METRIC_DETAILS_PATH = routes.dashboardMetricDetails();
+
+// const WIDGET_PATH = routes.dashboardMetric();
const SESSIONS_PATH = routes.sessions();
const ASSIST_PATH = routes.assist();
const ERRORS_PATH = routes.errors();
@@ -62,6 +77,7 @@ const CLIENT_PATH = routes.client();
const ONBOARDING_PATH = routes.onboarding();
const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB);
+@withStore
@withRouter
@connect((state) => {
const siteId = state.getIn([ 'user', 'siteId' ]);
@@ -108,6 +124,8 @@ class Router extends React.Component {
fetchInitialData = () => {
Promise.all([
this.props.fetchUserInfo().then(() => {
+ const { mstore } = this.props
+ mstore.initClient();
this.props.fetchIntegrationVariables()
}),
this.props.fetchSiteList().then(() => {
@@ -153,54 +171,78 @@ class Router extends React.Component {
{!hideHeader && }
-
-
-
- {
- const client = new APIClient(jwt);
- switch (location.pathname) {
- case '/integrations/slack':
- client.post('integrations/slack/add', {
- code: location.search.split('=')[ 1 ],
- state: tenantId,
- });
- break;
+ }>
+
+
+
+
+
+ {
+ const client = new APIClient(jwt);
+ switch (location.pathname) {
+ case '/integrations/slack':
+ client.post('integrations/slack/add', {
+ code: location.search.split('=')[ 1 ],
+ state: tenantId,
+ });
+ break;
+ }
+ return ;
}
- return ;
}
- }
- />
- { onboarding &&
-
- }
- { siteIdList.length === 0 &&
-
- }
-
-
-
-
-
-
-
-
-
- } />
- { routes.redirects.map(([ fr, to ]) => (
-
- )) }
-
+ />
+ { onboarding &&
+
+ }
+ { siteIdList.length === 0 &&
+
+ }
+
+
+
+
+
+
+
+
+
+
+
+ {/*
+
+
+
+ */}
+
+
+
+
+
+
+
+
+
+ } />
+ { routes.redirects.map(([ fr, to ]) => (
+
+ )) }
+
+
+
+
+
+ :
+ }>
+
+
+
+ { !existingTenant && }
+
- :
-
-
-
- { !existingTenant && }
-
- ;
+ ;
}
}
diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js
index a42f19468..626f033ea 100644
--- a/frontend/app/api_client.js
+++ b/frontend/app/api_client.js
@@ -1,5 +1,4 @@
import store from 'App/store';
-
import { queried } from './routes';
const siteIdRequiredPaths = [
@@ -24,6 +23,8 @@ const siteIdRequiredPaths = [
'/assist',
'/heatmaps',
'/custom_metrics',
+ '/dashboards',
+ '/metrics'
// '/custom_metrics/sessions',
];
@@ -68,12 +69,16 @@ export default class APIClient {
this.siteId = siteId;
}
- fetch(path, params, options = { clean: true }) {
+ fetch(path, params, options = { clean: true }) {
if (params !== undefined) {
const cleanedParams = options.clean ? clean(params) : params;
this.init.body = JSON.stringify(cleanedParams);
}
+ if (this.init.method === 'GET') {
+ delete this.init.body;
+ }
+
let fetch = window.fetch;
diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html
index a9d4b0f62..03300b45c 100644
--- a/frontend/app/assets/index.html
+++ b/frontend/app/assets/index.html
@@ -12,6 +12,7 @@
+