feat(chalice): full experimental config
feat(db): CH create new tables
This commit is contained in:
parent
c64a2bed0b
commit
80115b6006
31 changed files with 583 additions and 212 deletions
4
ee/api/.gitignore
vendored
4
ee/api/.gitignore
vendored
|
|
@ -177,6 +177,8 @@ chalicelib/saas
|
|||
README/*
|
||||
Pipfile
|
||||
|
||||
/.local/
|
||||
|
||||
/chalicelib/core/alerts.py
|
||||
/chalicelib/core/alerts_processor.py
|
||||
/chalicelib/core/announcements.py
|
||||
|
|
@ -260,4 +262,4 @@ Pipfile
|
|||
/build_alerts.sh
|
||||
/routers/subs/metrics.py
|
||||
/routers/subs/v1_api.py
|
||||
/chalicelib/core/dashboards.py
|
||||
/chalicelib/core/dashboards.py
|
||||
|
|
|
|||
|
|
@ -1,13 +1,28 @@
|
|||
from decouple import config
|
||||
import logging
|
||||
|
||||
if config("LEGACY_SEARCH", cast=bool, default=False):
|
||||
print(">>> Using legacy search")
|
||||
from . import autocomplete as autocomplete
|
||||
from . import sessions as sessions
|
||||
from . import errors as errors
|
||||
from . import metrics as metrics
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
print(">>> Using experimental sessions search")
|
||||
from . import sessions_exp as sessions
|
||||
else:
|
||||
from . import autocomplete_ee as autocomplete
|
||||
from . import sessions_ee as sessions
|
||||
from . import errors_ee as errors
|
||||
from . import metrics_new as metrics
|
||||
from . import sessions as sessions
|
||||
|
||||
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||
print(">>> Using experimental autocomplete")
|
||||
from . import autocomplete_exp as autocomplete
|
||||
else:
|
||||
from . import autocomplete as autocomplete
|
||||
|
||||
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||
print(">>> Using experimental error search")
|
||||
from . import errors_exp as errors
|
||||
else:
|
||||
from . import errors as errors
|
||||
|
||||
if config("EXP_METRICS", cast=bool, default=False):
|
||||
print(">>> Using experimental metrics")
|
||||
from . import metrics_exp as metrics
|
||||
else:
|
||||
from . import metrics as metrics
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import math
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import pg_client, sessions_helper
|
||||
from chalicelib.utils import pg_client, exp_ch_helper
|
||||
from chalicelib.utils import args_transformer
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
|
@ -169,7 +169,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
ch_query = f"""\
|
||||
SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT sessions.session_id) AS value
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;\
|
||||
|
|
@ -191,7 +191,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
startTimestamp = endTimestamp - diff
|
||||
|
||||
ch_query = f""" SELECT COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -223,7 +223,7 @@ def get_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimesta
|
|||
ch_query = f"""\
|
||||
SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT errors.session_id) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;\
|
||||
|
|
@ -253,7 +253,7 @@ def __count_distinct_errors(ch, project_id, startTimestamp, endTimestamp, ch_sub
|
|||
ch_query = f"""\
|
||||
SELECT
|
||||
COUNT(DISTINCT errors.message) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
count = ch.execute(query=ch_query,
|
||||
params={"project_id": project_id, "startTimestamp": startTimestamp,
|
||||
|
|
@ -283,13 +283,13 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
errors.message AS error,
|
||||
COUNT(1) AS count,
|
||||
COUNT(DISTINCT errors.session_id) AS sessions
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY errors.error_id, errors.message) AS errors_chart
|
||||
INNER JOIN (SELECT error_id AS error_id,
|
||||
toUnixTimestamp(MAX(datetime))*1000 AS lastOccurrenceAt,
|
||||
toUnixTimestamp(MIN(datetime))*1000 AS firstOccurrenceAt
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE event_type='ERROR' AND project_id=%(project_id)s
|
||||
GROUP BY error_id) AS errors_time USING(error_id)
|
||||
ORDER BY sessions DESC, count DESC LIMIT 10;"""
|
||||
|
|
@ -307,7 +307,7 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
ch_query = f"""\
|
||||
SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -351,7 +351,7 @@ def __get_page_metrics(ch, project_id, startTimestamp, endTimestamp, **args):
|
|||
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(NULLIF(pages.dom_content_loaded_event_end ,0)),0) AS avg_dom_content_load_start,
|
||||
COALESCE(avgOrNull(NULLIF(pages.first_contentful_paint_time,0)),0) AS avg_first_contentful_pixel
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -382,7 +382,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
|
|||
ch_sub_query += meta_condition
|
||||
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS avg_page_load_time
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)} AND pages.load_event_end>0;"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -395,7 +395,7 @@ def __get_application_activity(ch, project_id, startTimestamp, endTimestamp, **a
|
|||
ch_sub_query += meta_condition
|
||||
ch_sub_query.append("resources.type= %(type)s")
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0;"""
|
||||
row = ch.execute(query=ch_query,
|
||||
params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
|
||||
|
|
@ -442,7 +442,7 @@ def __get_user_activity(ch, project_id, startTimestamp, endTimestamp, **args):
|
|||
ch_sub_query.append("(sessions.pages_count>0 OR sessions.duration>0)")
|
||||
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(NULLIF(sessions.pages_count,0))),0) AS avg_visited_pages,
|
||||
COALESCE(avgOrNull(NULLIF(sessions.duration,0)),0) AS avg_session_duration
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -471,7 +471,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
ch_query = f"""SELECT resources.url,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
|
||||
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
|
||||
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
|
||||
|
|
@ -487,7 +487,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
ch_query = f"""SELECT url,
|
||||
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
|
||||
GROUP BY url, timestamp
|
||||
ORDER BY url, timestamp;"""
|
||||
|
|
@ -551,7 +551,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
AND resources.type = 'img' AND resources.duration>0
|
||||
{(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
|
||||
|
|
@ -564,7 +564,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
|
|||
density=density, neutral={"avg": 0})]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
AND resources.type = 'fetch' AND resources.duration>0
|
||||
{(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
|
||||
|
|
@ -582,7 +582,7 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
|
|||
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.load_event_end),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)} AND pages.load_event_end>0
|
||||
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
|
||||
GROUP BY timestamp
|
||||
|
|
@ -654,7 +654,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url_path))), 1, 5)) AS value,
|
||||
type AS key
|
||||
FROM {sessions_helper.get_main_resources_table(0)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(0)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY type
|
||||
ORDER BY type ASC;"""
|
||||
|
|
@ -668,7 +668,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
elif resource_type == "ALL" and events_only:
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT DISTINCT value AS value, type AS key
|
||||
FROM {sessions_helper.get_autocomplete_table(0)} autocomplete
|
||||
FROM {exp_ch_helper.get_autocomplete_table(0)} autocomplete
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
AND positionUTF8(lowerUTF8(value), %(value)s) != 0
|
||||
AND type IN ('LOCATION','INPUT','CLICK')
|
||||
|
|
@ -686,7 +686,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT DISTINCT url_path AS value,
|
||||
%(resource_type)s AS key
|
||||
FROM {sessions_helper.get_main_resources_table(0)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(0)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query,
|
||||
|
|
@ -702,7 +702,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
ch_query = f"""SELECT
|
||||
DISTINCT value AS value,
|
||||
'LOCATION' AS key
|
||||
FROM {sessions_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
FROM {exp_ch_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query,
|
||||
|
|
@ -715,7 +715,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
ch_sub_query.append("positionUTF8(lowerUTF8(value), %(value)s) != 0")
|
||||
ch_sub_query.append("type='INPUT")
|
||||
ch_query = f"""SELECT DISTINCT label AS value, 'INPUT' AS key
|
||||
FROM {sessions_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
FROM {exp_ch_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query,
|
||||
|
|
@ -728,7 +728,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
ch_sub_query.append("positionUTF8(lowerUTF8(value), %(value)s) != 0")
|
||||
ch_sub_query.append("type='CLICK'")
|
||||
ch_query = f"""SELECT DISTINCT value AS value, 'CLICK' AS key
|
||||
FROM {sessions_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
FROM {exp_ch_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query,
|
||||
|
|
@ -745,7 +745,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT DISTINCT sessions.{METADATA_FIELDS[key]} AS value,
|
||||
%(key)s AS key
|
||||
FROM {sessions_helper.get_main_sessions_table(0)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(0)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query,
|
||||
|
|
@ -757,19 +757,19 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT DISTINCT sessions.{SESSIONS_META_FIELDS[key]} AS value,
|
||||
'{key}' AS key
|
||||
FROM {sessions_helper.get_main_sessions_table(0)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(0)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
LIMIT 10;"""
|
||||
rows = ch.execute(query=ch_query, params={"project_id": project_id, "value": text, "key": key,
|
||||
"platform_0": platform})
|
||||
else:
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = """SELECT DISTINCT value AS value,
|
||||
type AS key
|
||||
FROM final.autocomplete
|
||||
WHERE project_id = toUInt16(2460)
|
||||
AND positionCaseInsensitiveUTF8(value, %(value)s) != 0
|
||||
LIMIT 10 BY type"""
|
||||
ch_query = f"""SELECT DISTINCT value AS value,
|
||||
type AS key
|
||||
FROM {exp_ch_helper.get_autocomplete_table(0)} AS autocomplete
|
||||
WHERE project_id = toUInt16(2460)
|
||||
AND positionCaseInsensitiveUTF8(value, %(value)s) != 0
|
||||
LIMIT 10 BY type"""
|
||||
|
||||
# print(ch.format(query=ch_query, params={"project_id": project_id, "value": text, "key": key,
|
||||
# "platform_0": platform}))
|
||||
|
|
@ -793,7 +793,7 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT resources.url_path AS key,
|
||||
COUNT(1) AS doc_count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY url_path
|
||||
ORDER BY doc_count DESC
|
||||
|
|
@ -810,7 +810,7 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
|
|||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COUNT(1) AS doc_count,
|
||||
toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -842,7 +842,7 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
resources.url_path, COUNT(1) AS doc_count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp, resources.url_path
|
||||
ORDER BY timestamp, doc_count DESC
|
||||
|
|
@ -899,7 +899,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -909,7 +909,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
|
|||
"value": url, "type": type, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"avg": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
|
||||
|
|
@ -933,7 +933,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.dom_building_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -943,7 +943,7 @@ def get_pages_dom_build_time(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
"value": url, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_building_time),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
|
||||
|
|
@ -976,7 +976,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT any(url) AS url, any(type) AS type, name,
|
||||
COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY name
|
||||
ORDER BY avg DESC
|
||||
|
|
@ -994,7 +994,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
ch_query = f"""SELECT name,
|
||||
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
AND name IN %(names)s
|
||||
GROUP BY name,timestamp
|
||||
|
|
@ -1031,7 +1031,7 @@ def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT user_country, COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY user_country
|
||||
ORDER BY user_country;"""
|
||||
|
|
@ -1053,8 +1053,8 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT sessions.user_country, COALESCE(avgOrNull(pages.speed_index),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
INNER JOIN {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions USING (session_id)
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
INNER JOIN {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions USING (session_id)
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY sessions.user_country
|
||||
ORDER BY value ,sessions.user_country;"""
|
||||
|
|
@ -1064,7 +1064,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
# print(ch.format(query=ch_query, params=params))
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.speed_index),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
|
||||
|
|
@ -1085,7 +1085,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.response_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1096,7 +1096,7 @@ def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1
|
|||
"value": url, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
results = {"value": avg,
|
||||
|
|
@ -1119,7 +1119,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT pages.response_time AS response_time,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY response_time
|
||||
ORDER BY response_time;"""
|
||||
|
|
@ -1128,12 +1128,12 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"]
|
||||
quantiles_keys = [50, 90, 95, 99]
|
||||
ch_query = f"""SELECT quantilesExact({",".join([str(i / 100) for i in quantiles_keys])})(pages.response_time) AS values
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
quantiles = ch.execute(query=ch_query, params=params)
|
||||
result = {
|
||||
|
|
@ -1233,7 +1233,7 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT intDiv(toHour(sessions.datetime),2)*2 AS hour,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY hour
|
||||
ORDER BY hour ASC;"""
|
||||
|
|
@ -1261,8 +1261,8 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
COALESCE(avgOrNull(if(pages.dom_content_loaded_event_time>0,pages.dom_content_loaded_event_time,null)),0) AS avg_dom_content_loaded,
|
||||
COALESCE(avgOrNull(if(pages.ttfb>0,pages.ttfb,null)),0) AS avg_till_first_bit,
|
||||
COALESCE(avgOrNull(if(pages.time_to_interactive>0,pages.time_to_interactive,null)),0) AS avg_time_to_interactive,
|
||||
(SELECT COUNT(1) FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages WHERE {" AND ".join(ch_sub_query)}) AS count_requests
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
(SELECT COUNT(1) FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages WHERE {" AND ".join(ch_sub_query)}) AS count_requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
AND (isNotNull(pages.response_time) AND pages.response_time>0 OR
|
||||
isNotNull(pages.first_paint) AND pages.first_paint>0 OR
|
||||
|
|
@ -1292,7 +1292,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.visually_complete),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1302,7 +1302,7 @@ def get_time_to_render(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
"endTimestamp": endTimestamp, "value": url, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.visually_complete),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
results = {"value": avg, "chart": __complete_missing_steps(rows=rows, start_time=startTimestamp,
|
||||
|
|
@ -1328,10 +1328,10 @@ def get_impacted_sessions_by_slow_pages(project_id, startTimestamp=TimeUTC.now(d
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT pages.session_id) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
AND (pages.response_time)>(SELECT COALESCE(avgOrNull(pages.response_time),0)
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(sch_sub_query)})*2
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1358,7 +1358,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp ASC;"""
|
||||
|
|
@ -1368,7 +1368,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_used_js_heap_size),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"value": avg,
|
||||
|
|
@ -1391,7 +1391,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(performance.avg_cpu),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp ASC;"""
|
||||
|
|
@ -1401,7 +1401,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_cpu),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"value": avg,
|
||||
|
|
@ -1424,7 +1424,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(performance.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(performance.avg_fps),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp ASC;"""
|
||||
|
|
@ -1434,7 +1434,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(performance.avg_fps),0) AS avg
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS performance
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"value": avg,
|
||||
|
|
@ -1460,7 +1460,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(1) AS value
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1478,7 +1478,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
groupArray([bv.user_browser_version, toString(bv.count)]) AS versions
|
||||
FROM (
|
||||
SELECT sessions.user_browser
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY sessions.user_browser
|
||||
ORDER BY COUNT(1) DESC
|
||||
|
|
@ -1489,7 +1489,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
SELECT sessions.user_browser,
|
||||
sessions.user_browser_version,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY sessions.user_browser,
|
||||
sessions.user_browser_version
|
||||
|
|
@ -1542,7 +1542,7 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
groupArray([domain, toString(count)]) AS keys
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(requests.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
requests.url_host AS domain, COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY timestamp,requests.url_host
|
||||
ORDER BY timestamp, count DESC
|
||||
|
|
@ -1587,7 +1587,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.
|
|||
groupArray([domain, toString(count)]) AS keys
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(requests.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
requests.url_host AS domain, COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY timestamp,requests.url_host
|
||||
ORDER BY timestamp, count DESC
|
||||
|
|
@ -1639,7 +1639,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT resources.url_host AS domain,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY resources.url_host
|
||||
ORDER BY value DESC
|
||||
|
|
@ -1649,7 +1649,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS avg
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
|
||||
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
|
||||
|
|
@ -1667,7 +1667,7 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
ch_query = f"""SELECT
|
||||
requests.url_host AS domain,
|
||||
COUNT(1) AS errors_count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY requests.url_host
|
||||
ORDER BY errors_count DESC
|
||||
|
|
@ -1693,7 +1693,7 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
(
|
||||
SELECT sessions.user_browser,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY sessions.user_browser
|
||||
ORDER BY count DESC
|
||||
|
|
@ -1704,7 +1704,7 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
|
|||
SELECT sessions.user_browser,
|
||||
sessions.user_browser_version,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY
|
||||
sessions.user_browser,
|
||||
|
|
@ -1742,7 +1742,7 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
|
|||
COUNT(1) AS all_requests,
|
||||
SUM(if(intDiv(requests.status, 100) == 4, 1, 0)) AS _4xx,
|
||||
SUM(if(intDiv(requests.status, 100) == 5, 1, 0)) AS _5xx
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY requests.method, requests.url_hostpath
|
||||
ORDER BY (_4xx + _5xx) DESC, all_requests DESC
|
||||
|
|
@ -1767,7 +1767,7 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now
|
|||
ch_query = f"""SELECT requests.method,
|
||||
requests.url_hostpath,
|
||||
COUNT(1) AS all_requests
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY requests.method, requests.url_hostpath
|
||||
ORDER BY all_requests DESC
|
||||
|
|
@ -1810,7 +1810,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
|
|||
SUM(events.event_type = 'REQUEST' AND intDiv(events.status, 100) == 5) AS _5xx,
|
||||
SUM(events.event_type = 'ERROR' AND events.source == 'js_exception') AS js,
|
||||
SUM(events.event_type = 'ERROR' AND events.source != 'js_exception') AS integrations
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS events
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS events
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1848,7 +1848,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
|
|||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(1) AS total,
|
||||
SUM(if(resources.type='fetch',1,0)) AS xhr
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1860,7 +1860,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
|
|||
neutral={"total": 0, "xhr": 0})
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.response_end),0) AS avg_response_end
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart_response_end)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -1885,7 +1885,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
|
|||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT errors.session_id) AS sessions_count,
|
||||
COUNT(DISTINCT errors.error_id) AS errors_count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;;"""
|
||||
|
|
@ -1896,7 +1896,7 @@ def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(de
|
|||
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
|
||||
ch_query = f"""SELECT COUNT(DISTINCT errors.session_id) AS sessions_count,
|
||||
COUNT(DISTINCT errors.error_id) AS errors_count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query_chart)};"""
|
||||
counts = ch.execute(query=ch_query,
|
||||
params={"step_size": step_size,
|
||||
|
|
@ -1928,7 +1928,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
|
|||
( SELECT resources.session_id,
|
||||
MIN(resources.datetime) AS base_datetime,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY resources.session_id
|
||||
) AS s
|
||||
|
|
@ -1937,7 +1937,7 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
|
|||
type,
|
||||
COALESCE(avgOrNull(NULLIF(count,0)),0) AS xavg
|
||||
FROM (SELECT resources.session_id, resources.type, COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY resources.session_id, resources.type) AS ss
|
||||
GROUP BY ss.session_id, ss.type) AS t USING (session_id)
|
||||
|
|
@ -1987,7 +1987,7 @@ def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_day
|
|||
FROM(SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
resources.type,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp,resources.type
|
||||
ORDER BY timestamp) AS t
|
||||
|
|
@ -2026,7 +2026,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
FROM
|
||||
(
|
||||
SELECT requests.datetime, requests.url_host
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
) AS sub_requests
|
||||
CROSS JOIN
|
||||
|
|
@ -2034,7 +2034,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
SELECT
|
||||
rs.url_host,
|
||||
COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS rs
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS rs
|
||||
WHERE {" AND ".join(sch_sub_query)}
|
||||
GROUP BY rs.url_host
|
||||
ORDER BY count DESC
|
||||
|
|
@ -2078,7 +2078,7 @@ def __get_application_activity_avg_page_load_time(ch, project_id, startTimestamp
|
|||
ch_sub_query += meta_condition
|
||||
ch_sub_query.append("pages.load_event_end>0")
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.load_event_end),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -2117,7 +2117,7 @@ def get_performance_avg_page_load_time(ch, project_id, startTimestamp=TimeUTC.no
|
|||
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.load_event_end),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
{(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
|
||||
GROUP BY timestamp
|
||||
|
|
@ -2159,7 +2159,7 @@ def __get_application_activity_avg_image_load_time(ch, project_id, startTimestam
|
|||
ch_sub_query.append("resources.duration>0")
|
||||
ch_query = f"""\
|
||||
SELECT COALESCE(avgOrNull(resources.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
row = ch.execute(query=ch_query,
|
||||
params={"project_id": project_id, "type": 'img', "startTimestamp": startTimestamp,
|
||||
|
|
@ -2193,7 +2193,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
|
|||
ch_sub_query_chart.append("resources.duration>0")
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
AND resources.type = 'img'
|
||||
{(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
|
||||
|
|
@ -2234,7 +2234,7 @@ def __get_application_activity_avg_request_load_time(ch, project_id, startTimest
|
|||
ch_sub_query.append("resources.type= %(type)s")
|
||||
ch_sub_query.append("resources.duration>0")
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(resources.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
row = ch.execute(query=ch_query,
|
||||
params={"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp,
|
||||
|
|
@ -2267,7 +2267,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
|
|||
ch_sub_query_chart.append("resources.duration>0")
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(resources.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
AND resources.type = 'fetch'
|
||||
{(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
|
||||
|
|
@ -2313,7 +2313,7 @@ def __get_page_metrics_avg_dom_content_load_start(ch, project_id, startTimestamp
|
|||
ch_sub_query += meta_condition
|
||||
ch_sub_query.append("pages.dom_content_loaded_event_end>0")
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -2334,7 +2334,7 @@ def __get_page_metrics_avg_dom_content_load_start_chart(ch, project_id, startTim
|
|||
ch_sub_query_chart.append("pages.dom_content_loaded_event_end>0")
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.dom_content_loaded_event_end),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2378,7 +2378,7 @@ def __get_page_metrics_avg_first_contentful_pixel(ch, project_id, startTimestamp
|
|||
# changed dom_content_loaded_event_start to dom_content_loaded_event_end
|
||||
ch_query = f"""\
|
||||
SELECT COALESCE(avgOrNull(pages.first_contentful_paint_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "type": 'fetch', "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -2399,7 +2399,7 @@ def __get_page_metrics_avg_first_contentful_pixel_chart(ch, project_id, startTim
|
|||
ch_sub_query_chart.append("pages.first_contentful_paint_time>0")
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.first_contentful_paint_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2444,7 +2444,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim
|
|||
|
||||
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(count)),0) AS value
|
||||
FROM (SELECT COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
GROUP BY session_id) AS groupped_data
|
||||
WHERE count>0;"""
|
||||
|
|
@ -2468,7 +2468,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
|
|||
ch_query = f"""SELECT timestamp, COALESCE(avgOrNull(count), 0) AS value
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
session_id, COUNT(1) AS count
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp,session_id
|
||||
ORDER BY timestamp) AS groupped_data
|
||||
|
|
@ -2515,7 +2515,7 @@ def __get_user_activity_avg_session_duration(ch, project_id, startTimestamp, end
|
|||
ch_sub_query.append("sessions.duration>0")
|
||||
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(sessions.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
|
||||
**__get_constraint_values(args)}
|
||||
|
|
@ -2537,7 +2537,7 @@ def __get_user_activity_avg_session_duration_chart(ch, project_id, startTimestam
|
|||
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(sessions.duration),0) AS value
|
||||
FROM {sessions_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
FROM {exp_ch_helper.get_main_sessions_table(startTimestamp)} AS sessions
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2565,7 +2565,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
|
|||
ch_sub_query_chart.append("pages.url_path = %(value)s")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0;"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2575,7 +2575,7 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
|
|||
results = rows[0]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.response_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.response_time) AND pages.response_time>0
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2604,7 +2604,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
|
|||
ch_sub_query_chart.append("pages.url_path = %(value)s")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COUNT(1) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2614,7 +2614,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
|
|||
result = rows[0]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
|
||||
COUNT(1) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2644,7 +2644,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
|
|||
ch_sub_query_chart.append("pages.url_path = %(value)s")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.first_paint),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0;"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2654,7 +2654,7 @@ def get_top_metrics_avg_first_paint(project_id, startTimestamp=TimeUTC.now(delta
|
|||
results = rows[0]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.first_paint),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.first_paint) AND pages.first_paint>0
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;;"""
|
||||
|
|
@ -2689,7 +2689,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
|
|||
ch_sub_query_chart.append("pages.dom_content_loaded_event_time>0")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2699,7 +2699,7 @@ def get_top_metrics_avg_dom_content_loaded(project_id, startTimestamp=TimeUTC.no
|
|||
results = helper.dict_to_camel_case(rows[0])
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2733,7 +2733,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
|
|||
ch_sub_query_chart.append("pages.ttfb>0")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.ttfb),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2743,7 +2743,7 @@ def get_top_metrics_avg_till_first_bit(project_id, startTimestamp=TimeUTC.now(de
|
|||
results = rows[0]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.ttfb),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -2777,7 +2777,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
|
|||
ch_sub_query_chart.append("pages.time_to_interactive >0")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
ch_query = f"""SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query)};"""
|
||||
params = {"step_size": step_size, "project_id": project_id,
|
||||
"startTimestamp": startTimestamp,
|
||||
|
|
@ -2787,7 +2787,7 @@ def get_top_metrics_avg_time_to_interactive(project_id, startTimestamp=TimeUTC.n
|
|||
results = rows[0]
|
||||
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
|
||||
COALESCE(avgOrNull(pages.time_to_interactive),0) AS value
|
||||
FROM {sessions_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
|
||||
WHERE {" AND ".join(ch_sub_query_chart)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp;"""
|
||||
|
|
@ -4,7 +4,7 @@ import schemas
|
|||
import schemas_ee
|
||||
from chalicelib.core import events, metadata, events_ios, \
|
||||
sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, sessions_helper
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
SESSION_PROJECTION_COLS = """\
|
||||
|
|
@ -471,7 +471,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
|
|||
extra_event = None
|
||||
if metric_of == schemas.TableMetricOfType.visited_url:
|
||||
extra_event = f"""SELECT DISTINCT ev.session_id, ev.path
|
||||
FROM {sessions_helper.get_main_events_table(data.startDate)} AS ev
|
||||
FROM {exp_ch_helper.get_main_events_table(data.startDate)} AS ev
|
||||
WHERE ev.datetime >= toDateTime(%(startDate)s / 1000)
|
||||
AND ev.datetime <= toDateTime(%(endDate)s / 1000)
|
||||
AND ev.project_id = %(project_id)s
|
||||
|
|
@ -1242,8 +1242,8 @@ def search_query_parts_ch(data, error_status, errors_only, favorite_only, issue,
|
|||
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
|
||||
"projectId": project_id, "userId": user_id}
|
||||
|
||||
MAIN_EVENTS_TABLE = sessions_helper.get_main_events_table(data.startDate)
|
||||
MAIN_SESSIONS_TABLE = sessions_helper.get_main_sessions_table(data.startDate)
|
||||
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startDate)
|
||||
MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startDate)
|
||||
|
||||
full_args["MAIN_EVENTS_TABLE"] = MAIN_EVENTS_TABLE
|
||||
full_args["MAIN_SESSIONS_TABLE"] = MAIN_SESSIONS_TABLE
|
||||
|
|
@ -4,7 +4,6 @@ import clickhouse_driver
|
|||
from decouple import config
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
|
|
|
|||
30
ee/api/chalicelib/utils/exp_ch_helper.py
Normal file
30
ee/api/chalicelib/utils/exp_ch_helper.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from decouple import config
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
if config("EXP_7D_MV", cast=bool, default=True):
|
||||
print(">>> Using experimental last 7 days materialized views")
|
||||
|
||||
|
||||
def get_main_events_table(timestamp):
|
||||
return "experimental.events_l7d_mv" \
|
||||
if config("EXP_7D_MV", cast=bool, default=True) \
|
||||
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events"
|
||||
|
||||
|
||||
def get_main_sessions_table(timestamp):
|
||||
return "experimental.sessions_l7d_mv" \
|
||||
if config("EXP_7D_MV", cast=bool, default=True) \
|
||||
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
|
||||
|
||||
|
||||
def get_main_resources_table(timestamp):
|
||||
return "experimental.resources_l7s_mv" \
|
||||
if config("EXP_7D_MV", cast=bool, default=True) \
|
||||
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources"
|
||||
|
||||
|
||||
def get_autocomplete_table(timestamp):
|
||||
return "experimental.autocomplete"
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
|
||||
def get_main_events_table(timestamp):
|
||||
return "final.events_l7d_mv" if timestamp >= TimeUTC.now(delta_days=-7) else "final.events"
|
||||
|
||||
|
||||
def get_main_sessions_table(timestamp):
|
||||
return "final.sessions_l7d_mv" if timestamp >= TimeUTC.now(delta_days=-7) else "final.sessions"
|
||||
|
||||
|
||||
def get_main_resources_table(timestamp):
|
||||
return "final.resources"
|
||||
|
||||
|
||||
def get_autocomplete_table(timestamp):
|
||||
return "final.autocomplete"
|
||||
|
|
@ -61,4 +61,8 @@ sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
|
|||
stage=default-ee
|
||||
version_number=1.0.0
|
||||
FS_DIR=/mnt/efs
|
||||
LEGACY_SEARCH=false
|
||||
EXP_SESSIONS_SEARCH=true
|
||||
EXP_AUTOCOMPLETE=true
|
||||
EXP_ERRORS_SEARCH=true
|
||||
EXP_METRICS=true
|
||||
EXP_7D_MV=true
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
MACHINE ,QUERY ,TABLE ,TIME(max_threads=auto(2)),TIME (max_threads=4),TIME (max_threads=4) 1W,TIME (max_threads=4) 1W_mv,TIME 1d (max_threads=4) 1W_mv,TIME 1d (max_threads=4) 1d_mv,REMARK
|
||||
r5.large (16Gb) ,Q1 ,massive2.events7 ,timeout ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,Q1 ,massive2.events7 ,12s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),Q1 ,massive2.events7 ,7s ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q1 ,massive2.events7 ,24s ,19s ,17s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q1.1 ,massive2.events7 ,16s ,14s ,12s ,- ,- ,- ,WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q1 ,massive_split.events_s,23s ,18s ,16s ,3s ,1s ,0.8s ,
|
||||
r5.xlarge (32Gb) ,Q1.2 ,massive_split.events_s,timeout ,27s ,23s ,2s ,0.8s ,0.7s ,events INNER JOIN meta WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q1.2.1 ,massive_split.events_s,timeout ,timeout ,27s ,4s ,1.6s ,0.8s ,events INNER JOIN meta WHERE ALL AND user_id
|
||||
r5.xlarge (32Gb) ,Q1.3 ,massive_split.events_s,15s ,13s ,11s ,2s ,0.7s ,0.6s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q1.4 ,massive_split.events_s,memory ,memory ,memory ,4s ,0.7s ,0.6s ,SUBQUERY WHERE user_id INNER JOIN events
|
||||
r5.xlarge (32Gb) ,Q1.5 ,massive_split.events_s,11s ,10s ,8s ,1s ,0.6s ,0.6s ,WHERE session_id IN SUBQUERY WHERE user_id
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
r5.large (16Gb) ,Q2 ,massive2.events7 ,timeout ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,Q2 ,massive2.events7 ,12s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),Q2 ,massive2.events7 ,7s ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q2 ,massive2.events7 ,24s ,20s ,17s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q2.1 ,massive2.events7 ,21s ,14s ,12s ,- ,- ,- ,WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q2 ,massive_split.events_s,22s ,19s ,16s ,3s ,0.7s ,0.6s ,
|
||||
r5.xlarge (32Gb) ,Q2.2 ,massive_split.events_s,timeout ,27s ,23s ,2s ,0.6s ,0.6s ,events INNER JOIN meta WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q2.2.1 ,massive_split.events_s,timeout ,timeout ,27s ,4s ,1.3s ,0.7s ,events INNER JOIN meta WHERE ALL AND user_id
|
||||
r5.xlarge (32Gb) ,Q2.3 ,massive_split.events_s,15s ,13s ,11s ,2s ,0.6s ,0.6s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q2.4 ,massive_split.events_s,memory ,memory ,memory ,4s ,0.6s ,0.6s ,SUBQUERY WHERE user_id INNER JOIN events
|
||||
r5.xlarge (32Gb) ,Q2.5 ,massive_split.events_s,11s ,10s ,8s ,1s ,0.6s ,0.6s ,WHERE session_id IN SUBQUERY WHERE user_id
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
r5.large (16Gb) ,Q3 ,massive2.events7 ,timeout ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,Q3 ,massive2.events7 ,11s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),Q3 ,massive2.events7 ,6s ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q3 ,massive2.events7 ,22s ,19s ,15s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q3.1 ,massive2.events7 ,18s ,15s ,12s ,- ,- ,- ,WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q3 ,massive_split.events_s,21s ,18s ,15s ,3s ,0.7s ,0.7s ,
|
||||
r5.xlarge (32Gb) ,Q3.2 ,massive_split.events_s,24s ,20s ,17s ,2s ,0.7s ,0.6s ,events INNER JOIN meta WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q3.2.1 ,massive_split.events_s,26s ,22s ,19s ,4s ,1.4s ,0.7s ,events INNER JOIN meta WHERE ALL AND user_id
|
||||
r5.xlarge (32Gb) ,Q3.3 ,massive_split.events_s,15s ,13s ,11s ,2s ,0.6s ,0.6s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q3.4 ,massive_split.events_s,memory ,memory ,memroy ,3s ,0.7s ,0.7s ,SUBQUERY WHERE user_id INNER JOIN events
|
||||
r5.xlarge (32Gb) ,Q3.5 ,massive_split.events_s,13s ,11s ,9s ,1s ,0.7s ,0.6s ,WHERE session_id IN SUBQUERY WHERE user_id
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
r5.large (16Gb) ,Q4 ,massive2.events7 ,timeout ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,Q4 ,massive2.events7 ,11s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),Q4 ,massive2.events7 ,6s ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q4 ,massive2.events7 ,22s ,18s ,15s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,Q4.1 ,massive2.events7 ,18s ,15s ,13s ,- ,- ,- ,WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q4 ,massive_split.events_s,21s ,18s ,15s ,3s ,0.7s ,0.6s ,
|
||||
r5.xlarge (32Gb) ,Q4.2 ,massive_split.events_s,24s ,20s ,17s ,2s ,0.7s ,0.6s ,events INNER JOIN meta WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q4.2.1 ,massive_split.events_s,27s ,22s ,19s ,4s ,1.4s ,0.7s ,events INNER JOIN meta WHERE ALL AND user_id
|
||||
r5.xlarge (32Gb) ,Q4.3 ,massive_split.events_s,15s ,13s ,11s ,2s ,0.6s ,0.7s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,Q4.4 ,massive_split.events_s,memory ,memory ,memroy ,3s ,1.2s ,0.7s ,SUBQUERY WHERE user_id INNER JOIN events
|
||||
r5.xlarge (32Gb) ,Q4.5 ,massive_split.events_s,14s ,12s ,9s ,2s ,0.6s ,0.6s ,WHERE session_id IN SUBQUERY WHERE user_id
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
r5.large (16Gb) ,QU1 ,massive2.events7 ,timeout ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,QU1 ,massive2.events7 ,18s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),QU1 ,massive2.events7 ,10s ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,QU1 ,massive2.events7 ,timeout ,28s ,24s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,QU1.1 ,massive2.events7 ,17s ,14s ,12s ,- ,- ,- ,WHERE user_id
|
||||
r5.xlarge (32Gb) ,QU1.2 ,massive_split.events_s,timeout ,timeout ,timeout ,6s ,1.5s ,0.7s ,events INNER JOIN meta
|
||||
r5.xlarge (32Gb) ,QU1.3 ,massive_split.events_s,timeout ,timeout ,23s ,3s ,1.4s ,0.7s ,events INNER JOIN meta WHERE user_id
|
||||
r5.xlarge (32Gb) ,QU1.4 ,massive_split.events_s,15s ,13s ,11s ,2s ,0.7s ,0.6s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,QU1.4-A,massive_split.events_s,timeout ,timeout ,timeout ,6s ,0.7s ,0.7s ,events INNER JOIN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,QU1.5 ,massive_split.events_s,memory ,memory ,memory ,memory ,memory ,2s ,SUBQUERY WHERE user_id INNER JOIN events
|
||||
r5.xlarge (32Gb) ,QU1.6 ,massive_split.events_s,11s ,10s ,8s ,1s ,0.7s ,0.7s ,events INNER JOIN SUBQUERY WHERE user_id WHERE session_id IN SUBQUERY WHERE user_id
|
||||
r5.xlarge (32Gb) ,QU1.6-A,massive_split.events_s,timeout ,timeout ,timeout ,6s ,0.7s ,0.7s ,events INNER JOIN SUBQUERY WHERE session_id IN SUBQUERY
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
r5.large (16Gb) ,QM4 ,massive2.events7 ,- ,- ,- ,- ,- ,- ,
|
||||
r5.2xlarge (64Gb) ,QM4 ,massive2.events7 ,3s ,- ,- ,- ,- ,- ,
|
||||
r5.3xlarge (128Gb),QM4 ,massive2.events7 ,- ,- ,- ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,QM4 ,massive2.events7 ,7s ,5s ,4s ,- ,- ,- ,
|
||||
r5.xlarge (32Gb) ,QM4 ,massive_split.events_s,6s ,5s ,4s ,1s ,0.7s ,0.6s ,
|
||||
======== ,===== ,================= ,=========== ,============ ,== ,== ,== ,== ,==
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
CREATE DATABASE IF NOT EXISTS experimental;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.autocomplete
|
||||
(
|
||||
project_id UInt16,
|
||||
type LowCardinality(String),
|
||||
value String,
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, type, value)
|
||||
TTL _timestamp + INTERVAL 1 MONTH;
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.events
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'LONGTASK'=5,'ERROR'=6,'CUSTOM'=7, 'GRAPHQL'=8, 'STATEACTION'=9),
|
||||
datetime DateTime,
|
||||
label Nullable(String),
|
||||
hesitation_time Nullable(UInt32),
|
||||
name Nullable(String),
|
||||
payload Nullable(String),
|
||||
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
|
||||
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
|
||||
message Nullable(String),
|
||||
error_id Nullable(String),
|
||||
duration Nullable(UInt16),
|
||||
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
|
||||
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)),
|
||||
container_id Nullable(String),
|
||||
container_name Nullable(String),
|
||||
container_src Nullable(String),
|
||||
url Nullable(String),
|
||||
url_host Nullable(String) MATERIALIZED lower(domain(url)),
|
||||
url_path Nullable(String) MATERIALIZED lower(pathFull(url)),
|
||||
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
|
||||
request_start Nullable(UInt16),
|
||||
response_start Nullable(UInt16),
|
||||
response_end Nullable(UInt16),
|
||||
dom_content_loaded_event_start Nullable(UInt16),
|
||||
dom_content_loaded_event_end Nullable(UInt16),
|
||||
load_event_start Nullable(UInt16),
|
||||
load_event_end Nullable(UInt16),
|
||||
first_paint Nullable(UInt16),
|
||||
first_contentful_paint_time Nullable(UInt16),
|
||||
speed_index Nullable(UInt16),
|
||||
visually_complete Nullable(UInt16),
|
||||
time_to_interactive Nullable(UInt16),
|
||||
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
|
||||
minus(response_start, request_start), Null),
|
||||
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
|
||||
minus(response_end, request_start), Null),
|
||||
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
|
||||
minus(response_end, response_start), Null),
|
||||
dom_building_time Nullable(UInt16) MATERIALIZED if(
|
||||
greaterOrEquals(dom_content_loaded_event_start, response_end),
|
||||
minus(dom_content_loaded_event_start, response_end), Null),
|
||||
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
|
||||
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
|
||||
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
|
||||
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
|
||||
minus(load_event_end, load_event_start), Null),
|
||||
min_fps Nullable(UInt8),
|
||||
avg_fps Nullable(UInt8),
|
||||
max_fps Nullable(UInt8),
|
||||
min_cpu Nullable(UInt8),
|
||||
avg_cpu Nullable(UInt8),
|
||||
max_cpu Nullable(UInt8),
|
||||
min_total_js_heap_size Nullable(UInt64),
|
||||
avg_total_js_heap_size Nullable(UInt64),
|
||||
max_total_js_heap_size Nullable(UInt64),
|
||||
min_used_js_heap_size Nullable(UInt64),
|
||||
avg_used_js_heap_size Nullable(UInt64),
|
||||
max_used_js_heap_size Nullable(UInt64),
|
||||
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
|
||||
status Nullable(UInt16),
|
||||
success Nullable(UInt8),
|
||||
request_body Nullable(String),
|
||||
response_body Nullable(String),
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, event_type, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.resources
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
datetime DateTime,
|
||||
url String,
|
||||
url_host String MATERIALIZED lower(domain(url)),
|
||||
url_path String MATERIALIZED lower(path(url)),
|
||||
url_hostpath String MATERIALIZED concat(url_host, url_path),
|
||||
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
|
||||
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
|
||||
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
|
||||
nullIf(splitByChar('/', url_path)[-2], ''))),
|
||||
duration Nullable(UInt16),
|
||||
ttfb Nullable(UInt16),
|
||||
header_size Nullable(UInt16),
|
||||
encoded_body_size Nullable(UInt32),
|
||||
decoded_body_size Nullable(UInt32),
|
||||
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
|
||||
success Nullable(UInt8) COMMENT 'currently available for type=img only',
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, type, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.sessions
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
tracker_version LowCardinality(String),
|
||||
rev_id LowCardinality(Nullable(String)),
|
||||
user_uuid UUID,
|
||||
user_os LowCardinality(String),
|
||||
user_os_version LowCardinality(Nullable(String)),
|
||||
user_browser LowCardinality(String),
|
||||
user_browser_version LowCardinality(Nullable(String)),
|
||||
user_device Nullable(String),
|
||||
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
|
||||
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
|
||||
datetime DateTime,
|
||||
duration UInt32,
|
||||
pages_count UInt16,
|
||||
events_count UInt16,
|
||||
errors_count UInt16,
|
||||
utm_source Nullable(String),
|
||||
utm_medium Nullable(String),
|
||||
utm_campaign Nullable(String),
|
||||
user_id Nullable(String),
|
||||
metadata_1 Nullable(String),
|
||||
metadata_2 Nullable(String),
|
||||
metadata_3 Nullable(String),
|
||||
metadata_4 Nullable(String),
|
||||
metadata_5 Nullable(String),
|
||||
metadata_6 Nullable(String),
|
||||
metadata_7 Nullable(String),
|
||||
metadata_8 Nullable(String),
|
||||
metadata_9 Nullable(String),
|
||||
metadata_10 Nullable(String),
|
||||
issue_types Array(LowCardinality(String)),
|
||||
referrer Nullable(String),
|
||||
base_referrer Nullable(String),
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMMDD(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH
|
||||
SETTINGS index_granularity = 512;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
|
||||
(
|
||||
project_id UInt16,
|
||||
user_id UInt32,
|
||||
session_id UInt64,
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, user_id, session_id)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
|
||||
(
|
||||
project_id UInt16,
|
||||
user_id UInt32,
|
||||
error_id String,
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, user_id, error_id)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
|
||||
ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 7 DAY
|
||||
POPULATE
|
||||
AS
|
||||
SELECT *
|
||||
FROM experimental.events
|
||||
WHERE datetime >= now() - INTERVAL 7 DAY;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
|
||||
ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 7 DAY
|
||||
POPULATE
|
||||
AS
|
||||
SELECT *
|
||||
FROM experimental.resources
|
||||
WHERE datetime >= now() - INTERVAL 7 DAY;
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
|
||||
ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMMDD(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 7 DAY
|
||||
SETTINGS index_granularity = 512
|
||||
POPULATE
|
||||
AS
|
||||
SELECT *
|
||||
FROM experimental.sessions
|
||||
WHERE datetime >= now() - INTERVAL 7 DAY
|
||||
AND isNotNull(duration)
|
||||
AND duration > 0;
|
||||
|
|
@ -0,0 +1 @@
|
|||
CREATE DATABASE IF NOT EXISTS experimental;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.autocomplete
|
||||
(
|
||||
project_id UInt16,
|
||||
type LowCardinality(String),
|
||||
value String,
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, type, value)
|
||||
TTL _timestamp + INTERVAL 1 MONTH;
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.events
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'LONGTASK'=5,'ERROR'=6,'CUSTOM'=7, 'GRAPHQL'=8, 'STATEACTION'=9),
|
||||
datetime DateTime,
|
||||
label Nullable(String),
|
||||
hesitation_time Nullable(UInt32),
|
||||
name Nullable(String),
|
||||
payload Nullable(String),
|
||||
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
|
||||
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
|
||||
message Nullable(String),
|
||||
error_id Nullable(String),
|
||||
duration Nullable(UInt16),
|
||||
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
|
||||
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)),
|
||||
container_id Nullable(String),
|
||||
container_name Nullable(String),
|
||||
container_src Nullable(String),
|
||||
url Nullable(String),
|
||||
url_host Nullable(String) MATERIALIZED lower(domain(url)),
|
||||
url_path Nullable(String) MATERIALIZED lower(pathFull(url)),
|
||||
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
|
||||
request_start Nullable(UInt16),
|
||||
response_start Nullable(UInt16),
|
||||
response_end Nullable(UInt16),
|
||||
dom_content_loaded_event_start Nullable(UInt16),
|
||||
dom_content_loaded_event_end Nullable(UInt16),
|
||||
load_event_start Nullable(UInt16),
|
||||
load_event_end Nullable(UInt16),
|
||||
first_paint Nullable(UInt16),
|
||||
first_contentful_paint_time Nullable(UInt16),
|
||||
speed_index Nullable(UInt16),
|
||||
visually_complete Nullable(UInt16),
|
||||
time_to_interactive Nullable(UInt16),
|
||||
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
|
||||
minus(response_start, request_start), Null),
|
||||
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
|
||||
minus(response_end, request_start), Null),
|
||||
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
|
||||
minus(response_end, response_start), Null),
|
||||
dom_building_time Nullable(UInt16) MATERIALIZED if(
|
||||
greaterOrEquals(dom_content_loaded_event_start, response_end),
|
||||
minus(dom_content_loaded_event_start, response_end), Null),
|
||||
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
|
||||
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
|
||||
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
|
||||
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
|
||||
minus(load_event_end, load_event_start), Null),
|
||||
min_fps Nullable(UInt8),
|
||||
avg_fps Nullable(UInt8),
|
||||
max_fps Nullable(UInt8),
|
||||
min_cpu Nullable(UInt8),
|
||||
avg_cpu Nullable(UInt8),
|
||||
max_cpu Nullable(UInt8),
|
||||
min_total_js_heap_size Nullable(UInt64),
|
||||
avg_total_js_heap_size Nullable(UInt64),
|
||||
max_total_js_heap_size Nullable(UInt64),
|
||||
min_used_js_heap_size Nullable(UInt64),
|
||||
avg_used_js_heap_size Nullable(UInt64),
|
||||
max_used_js_heap_size Nullable(UInt64),
|
||||
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
|
||||
status Nullable(UInt16),
|
||||
success Nullable(UInt8),
|
||||
request_body Nullable(String),
|
||||
response_body Nullable(String),
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, event_type, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.resources
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
datetime DateTime,
|
||||
url String,
|
||||
url_host String MATERIALIZED lower(domain(url)),
|
||||
url_path String MATERIALIZED lower(path(url)),
|
||||
url_hostpath String MATERIALIZED concat(url_host, url_path),
|
||||
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
|
||||
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
|
||||
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
|
||||
nullIf(splitByChar('/', url_path)[-2], ''))),
|
||||
duration Nullable(UInt16),
|
||||
ttfb Nullable(UInt16),
|
||||
header_size Nullable(UInt16),
|
||||
encoded_body_size Nullable(UInt32),
|
||||
decoded_body_size Nullable(UInt32),
|
||||
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
|
||||
success Nullable(UInt8) COMMENT 'currently available for type=img only',
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = MergeTree
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, type, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
CREATE TABLE IF NOT EXISTS experimental.sessions
|
||||
(
|
||||
session_id UInt64,
|
||||
project_id UInt16,
|
||||
tracker_version LowCardinality(String),
|
||||
rev_id LowCardinality(Nullable(String)),
|
||||
user_uuid UUID,
|
||||
user_os LowCardinality(String),
|
||||
user_os_version LowCardinality(Nullable(String)),
|
||||
user_browser LowCardinality(String),
|
||||
user_browser_version LowCardinality(Nullable(String)),
|
||||
user_device Nullable(String),
|
||||
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
|
||||
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
|
||||
datetime DateTime,
|
||||
duration UInt32,
|
||||
pages_count UInt16,
|
||||
events_count UInt16,
|
||||
errors_count UInt16,
|
||||
utm_source Nullable(String),
|
||||
utm_medium Nullable(String),
|
||||
utm_campaign Nullable(String),
|
||||
user_id Nullable(String),
|
||||
metadata_1 Nullable(String),
|
||||
metadata_2 Nullable(String),
|
||||
metadata_3 Nullable(String),
|
||||
metadata_4 Nullable(String),
|
||||
metadata_5 Nullable(String),
|
||||
metadata_6 Nullable(String),
|
||||
metadata_7 Nullable(String),
|
||||
metadata_8 Nullable(String),
|
||||
metadata_9 Nullable(String),
|
||||
metadata_10 Nullable(String),
|
||||
issue_types Array(LowCardinality(String)),
|
||||
referrer Nullable(String),
|
||||
base_referrer Nullable(String),
|
||||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMMDD(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH
|
||||
SETTINGS index_granularity = 512;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
-- Per-project record of which sessions a user has already viewed.
-- ReplacingMergeTree collapses duplicate (project_id, user_id, session_id)
-- rows at merge time, keeping the row with the newest _timestamp, so
-- re-inserting a "viewed" marker is idempotent.
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
    project_id UInt16,
    user_id    UInt32,
    session_id UInt64,
    -- Row version used by ReplacingMergeTree deduplication.
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, user_id, session_id)
      -- Viewed markers expire together with the 3-month session retention.
      TTL _timestamp + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
-- Per-project record of which errors a user has already viewed.
-- Mirrors experimental.user_viewed_sessions, but keyed by the string
-- error_id instead of a numeric session_id. ReplacingMergeTree makes
-- repeated "viewed" inserts idempotent (latest _timestamp wins on merge).
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
    project_id UInt16,
    user_id    UInt32,
    error_id   String,
    -- Row version used by ReplacingMergeTree deduplication.
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, user_id, error_id)
      -- Expire markers on the same 3-month horizon as the source data.
      TTL _timestamp + INTERVAL 3 MONTH;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
-- Rolling last-7-days copy of experimental.events for fast recent-data
-- queries. New inserts into experimental.events flow through the WHERE
-- filter; POPULATE backfills the view with existing rows at creation
-- time, and the TTL drops anything older than a week.
-- NOTE(review): POPULATE can miss rows inserted while the backfill runs —
-- acceptable for a best-effort hot cache; confirm with the migration owner.
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
    ENGINE = MergeTree
        PARTITION BY toYYYYMM(datetime)
        ORDER BY (project_id, datetime, session_id)
        TTL datetime + INTERVAL 7 DAY
    POPULATE
AS
SELECT *
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
-- Rolling last-7-days copy of experimental.resources; same pattern as
-- events_l7d_mv. POPULATE backfills existing recent rows once at
-- creation, the WHERE filter applies to every subsequent insert, and
-- the TTL expires rows past the 7-day window.
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
    ENGINE = MergeTree
        PARTITION BY toYYYYMM(datetime)
        ORDER BY (project_id, datetime, session_id)
        TTL datetime + INTERVAL 7 DAY
    POPULATE
AS
SELECT *
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
-- Rolling last-7-days view over experimental.sessions, restricted to
-- finished sessions (non-NULL, positive duration). Uses the same
-- ReplacingMergeTree(_timestamp) + daily partitioning + index
-- granularity as the source sessions table so session updates
-- deduplicate here as well. POPULATE backfills at creation time.
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
    ENGINE = ReplacingMergeTree(_timestamp)
        PARTITION BY toYYYYMMDD(datetime)
        ORDER BY (project_id, datetime, session_id)
        TTL datetime + INTERVAL 7 DAY
        SETTINGS index_granularity = 512
    POPULATE
AS
SELECT *
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
  -- Only completed sessions: duration is NULL while a session is live.
  AND isNotNull(duration)
  AND duration > 0;
|
||||
Loading…
Add table
Reference in a new issue