* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* feat(chalice): autocomplete return top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* refactor(DB): changed dashboard&metrics constraints

* refactor(chalice): removed cards
refactor(chalice): removed code related to resources
refactor(DB): removed cards
refactor(DB): removed code related to resources
This commit is contained in:
Kraiem Taha Yassine 2024-11-04 17:57:14 +01:00 committed by GitHub
parent 8deb37e8b0
commit d2697061e9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 121 additions and 3708 deletions

View file

@@ -34,30 +34,6 @@ LeftToDb = {
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(visually_complete,0))"},
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": "public.sessions",
"formula": "COUNT(DISTINCT session_id)",

View file

@@ -7,53 +7,16 @@ from chalicelib.core import metrics
logger = logging.getLogger(__name__)
def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.COUNT_SESSIONS: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.AVG_IMAGE_LOAD_TIME: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.AVG_PAGE_LOAD_TIME: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.AVG_REQUEST_LOAD_TIME: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOAD_START: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.AVG_FIRST_CONTENTFUL_PIXEL: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.AVG_VISITED_PAGES: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.AVG_SESSION_DURATION: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.AVG_PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.AVG_PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.AVG_RESPONSE_TIME: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.AVG_FIRST_PAINT: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOADED: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.AVG_TILL_FIRST_BYTE: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.AVG_TIME_TO_INTERACTIVE: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.COUNT_REQUESTS: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.AVG_TIME_TO_RENDER: metrics.get_time_to_render,
schemas.MetricOfWebVitals.AVG_USED_JS_HEAP_SIZE: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.AVG_CPU: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.AVG_FPS: metrics.get_avg_fps,
schemas.MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.DOMAINS_ERRORS_4XX: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.DOMAINS_ERRORS_5XX: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.ERRORS_PER_DOMAINS: metrics.get_errors_per_domains,
schemas.MetricOfErrors.CALLS_ERRORS: metrics.get_calls_errors,
schemas.MetricOfErrors.ERRORS_PER_TYPE: metrics.get_errors_per_type,
schemas.MetricOfErrors.RESOURCES_BY_PARTY: metrics.get_resources_by_party,
schemas.MetricOfPerformance.SPEED_LOCATION: metrics.get_speed_index_location,
schemas.MetricOfPerformance.SLOWEST_DOMAINS: metrics.get_slowest_domains,
schemas.MetricOfPerformance.SESSIONS_PER_BROWSER: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.TIME_TO_RENDER: metrics.get_time_to_render,
schemas.MetricOfPerformance.IMPACTED_SESSIONS_BY_SLOW_PAGES: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.MEMORY_CONSUMPTION: metrics.get_memory_consumption,
schemas.MetricOfPerformance.CPU: metrics.get_avg_cpu,
schemas.MetricOfPerformance.FPS: metrics.get_avg_fps,
schemas.MetricOfPerformance.CRASHES: metrics.get_crashes,
schemas.MetricOfPerformance.RESOURCES_VS_VISUALLY_COMPLETE: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
schemas.MetricOfPerformance.PAGES_RESPONSE_TIME_DISTRIBUTION: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.MISSING_RESOURCES: metrics.get_missing_resources_trend,
schemas.MetricOfResources.SLOWEST_RESOURCES: metrics.get_slowest_resources,
schemas.MetricOfResources.RESOURCES_LOADING_TIME: metrics.get_resources_loading_time,
schemas.MetricOfResources.RESOURCE_TYPE_VS_RESPONSE_END: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.RESOURCES_COUNT_BY_TYPE: metrics.get_resources_count_by_type,
schemas.MetricOfWebVitals.COUNT_USERS: metrics.get_unique_users, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data)

File diff suppressed because it is too large Load diff

View file

@@ -1,32 +0,0 @@
from chalicelib.utils import helper, pg_client
from decouple import config
def get_by_session_id(session_id, project_id, start_ts, duration):
with pg_client.PostgresClient() as cur:
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
duration = 0
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
ch_query = """\
SELECT
timestamp AS datetime,
url,
type,
resources.duration AS duration,
ttfb,
header_size,
encoded_body_size,
decoded_body_size,
success,
COALESCE(CASE WHEN status=0 THEN NULL ELSE status END, CASE WHEN success THEN 200 END) AS status
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE session_id = %(session_id)s
AND project_id= %(project_id)s
AND sessions.start_ts=%(start_ts)s
AND resources.timestamp>=%(res_start_ts)s
AND resources.timestamp<=%(res_end_ts)s;"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
cur.execute(cur.mogrify(ch_query, params))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)

View file

@@ -1,6 +1,6 @@
import schemas
from chalicelib.core import events, metadata, events_mobile, \
sessions_mobs, issues, resources, assist, sessions_devtool, canvas, user_testing
sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
@@ -121,8 +121,6 @@ def get_events(project_id, session_id):
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=s_data["startTs"], duration=s_data["duration"])
data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)

View file

@@ -393,14 +393,6 @@ class AlertColumn(str, Enum):
PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE = "performance.page_response_time.average"
PERFORMANCE__TTFB__AVERAGE = "performance.ttfb.average"
PERFORMANCE__TIME_TO_RENDER__AVERAGE = "performance.time_to_render.average"
PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE = "performance.image_load_time.average"
PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE = "performance.request_load_time.average"
RESOURCES__LOAD_TIME__AVERAGE = "resources.load_time.average"
RESOURCES__MISSING__COUNT = "resources.missing.count"
ERRORS__4XX_5XX__COUNT = "errors.4xx_5xx.count"
ERRORS__4XX__COUNT = "errors.4xx.count"
ERRORS__5XX__COUNT = "errors.5xx.count"
ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT = "errors.javascript.impacted_sessions.count"
PERFORMANCE__CRASHES__COUNT = "performance.crashes.count"
ERRORS__JAVASCRIPT__COUNT = "errors.javascript.count"
ERRORS__BACKEND__COUNT = "errors.backend.count"
@@ -945,7 +937,6 @@ class MetricType(str, Enum):
class MetricOfErrors(str, Enum):
CALLS_ERRORS = "callsErrors"
DOMAINS_ERRORS_4XX = "domainsErrors4xx"
DOMAINS_ERRORS_5XX = "domainsErrors5xx"
ERRORS_PER_DOMAINS = "errorsPerDomains"
@@ -954,47 +945,8 @@ class MetricOfErrors(str, Enum):
RESOURCES_BY_PARTY = "resourcesByParty"
class MetricOfPerformance(str, Enum):
CPU = "cpu"
CRASHES = "crashes"
FPS = "fps"
IMPACTED_SESSIONS_BY_SLOW_PAGES = "impactedSessionsBySlowPages"
MEMORY_CONSUMPTION = "memoryConsumption"
PAGES_DOM_BUILDTIME = "pagesDomBuildtime"
PAGES_RESPONSE_TIME = "pagesResponseTime"
PAGES_RESPONSE_TIME_DISTRIBUTION = "pagesResponseTimeDistribution"
RESOURCES_VS_VISUALLY_COMPLETE = "resourcesVsVisuallyComplete"
SESSIONS_PER_BROWSER = "sessionsPerBrowser"
SLOWEST_DOMAINS = "slowestDomains"
SPEED_LOCATION = "speedLocation"
TIME_TO_RENDER = "timeToRender"
class MetricOfResources(str, Enum):
MISSING_RESOURCES = "missingResources"
RESOURCES_COUNT_BY_TYPE = "resourcesCountByType"
RESOURCES_LOADING_TIME = "resourcesLoadingTime"
RESOURCE_TYPE_VS_RESPONSE_END = "resourceTypeVsResponseEnd"
SLOWEST_RESOURCES = "slowestResources"
class MetricOfWebVitals(str, Enum):
AVG_CPU = "avgCpu"
AVG_DOM_CONTENT_LOADED = "avgDomContentLoaded"
AVG_DOM_CONTENT_LOAD_START = "avgDomContentLoadStart"
AVG_FIRST_CONTENTFUL_PIXEL = "avgFirstContentfulPixel"
AVG_FIRST_PAINT = "avgFirstPaint"
AVG_FPS = "avgFps"
AVG_IMAGE_LOAD_TIME = "avgImageLoadTime"
AVG_PAGE_LOAD_TIME = "avgPageLoadTime"
AVG_PAGES_DOM_BUILDTIME = "avgPagesDomBuildtime"
AVG_PAGES_RESPONSE_TIME = "avgPagesResponseTime"
AVG_REQUEST_LOAD_TIME = "avgRequestLoadTime"
AVG_RESPONSE_TIME = "avgResponseTime"
AVG_SESSION_DURATION = "avgSessionDuration"
AVG_TILL_FIRST_BYTE = "avgTillFirstByte"
AVG_TIME_TO_INTERACTIVE = "avgTimeToInteractive"
AVG_TIME_TO_RENDER = "avgTimeToRender"
AVG_USED_JS_HEAP_SIZE = "avgUsedJsHeapSize"
AVG_VISITED_PAGES = "avgVisitedPages"
COUNT_REQUESTS = "countRequests"
@@ -1225,43 +1177,9 @@ class CardErrors(__CardSchema):
return self
class CardPerformance(__CardSchema):
metric_type: Literal[MetricType.PERFORMANCE]
metric_of: MetricOfPerformance = Field(default=MetricOfPerformance.CPU)
view_type: MetricOtherViewType = Field(...)
@model_validator(mode="before")
@classmethod
def __enforce_default(cls, values):
values["series"] = []
return values
@model_validator(mode="after")
def __transform(self):
self.metric_of = MetricOfPerformance(self.metric_of)
return self
class CardResources(__CardSchema):
metric_type: Literal[MetricType.RESOURCES]
metric_of: MetricOfResources = Field(default=MetricOfResources.MISSING_RESOURCES)
view_type: MetricOtherViewType = Field(...)
@model_validator(mode="before")
@classmethod
def __enforce_default(cls, values):
values["series"] = []
return values
@model_validator(mode="after")
def __transform(self):
self.metric_of = MetricOfResources(self.metric_of)
return self
class CardWebVital(__CardSchema):
metric_type: Literal[MetricType.WEB_VITAL]
metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.AVG_CPU)
metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.AVG_VISITED_PAGES)
view_type: MetricOtherViewType = Field(...)
@model_validator(mode="before")
@@ -1390,7 +1308,7 @@ class CardPathAnalysis(__CardSchema):
# Union of cards-schemas that doesn't change between FOSS and EE
__cards_union_base = Union[
CardTimeSeries, CardTable, CardFunnel,
CardErrors, CardPerformance, CardResources,
CardErrors,
CardWebVital, CardHeatMap,
CardPathAnalysis]
CardSchema = ORUnion(Union[__cards_union_base, CardInsights], discriminator='metric_type')

View file

@@ -44,6 +44,3 @@ if config("EXP_FUNNELS", cast=bool, default=False):
from . import significance_exp as significance
else:
from . import significance as significance
if config("EXP_RESOURCES", cast=bool, default=False):
logging.info(">>> Using experimental resources for session-replay")

View file

@@ -40,30 +40,6 @@ LeftToDb = {
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(visually_complete,0))"},
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": "public.sessions",
"formula": "COUNT(DISTINCT session_id)",

View file

@@ -52,49 +52,6 @@ LeftToDb = {
"formula": "AVG(NULLIF(visually_complete,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='img'"
},
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='fetch'"
},
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))"
},
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "COUNT(DISTINCT url_hostpath)",
"condition": "success= FALSE AND type='img'"
},
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)!=2"
},
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==4"
},
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==5"
},
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source='js_exception'"
},
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_sessions_table(timestamp)} AS sessions",
"formula": "COUNT(DISTINCT session_id)",

File diff suppressed because it is too large Load diff

View file

@@ -1,45 +0,0 @@
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils import ch_client
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
def get_by_session_id(session_id, project_id, start_ts, duration):
with ch_client.ClickHouseClient() as ch:
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
duration = 0
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
if config("EXP_RESOURCES", cast=bool, default=False):
ch_query = f"""SELECT
datetime,url,type,duration,ttfb,header_size,
encoded_body_size,decoded_body_size,success,
if(success, 200, 400) AS status
FROM {exp_ch_helper.get_main_resources_table(start_ts)}
WHERE session_id = toUInt64(%(session_id)s)
AND project_id = toUInt16(%(project_id)s)
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
else:
ch_query = """SELECT
datetime,url,type,duration,ttfb,header_size,
encoded_body_size,decoded_body_size,success,
coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s)
AND project_id = toUInt16(%(project_id)s)
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
rows = ch.execute(query=ch_query, params=params)
results = []
for r in rows:
r["datetime"] = TimeUTC.datetime_to_timestamp(r["datetime"])
# TODO: remove this once the tracker is fixed
if isinstance(r["url"], bytes):
try:
r["url"] = r["url"].decode("utf-8")
except UnicodeDecodeError:
continue
results.append(r)
return helper.list_to_camel_case(results)

View file

@@ -1,6 +1,6 @@
import schemas
from chalicelib.core import events, metadata, events_mobile, \
sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas, user_testing
sessions_mobs, issues, assist, sessions_devtool, canvas, user_testing
from chalicelib.utils import errors_helper
from chalicelib.utils import pg_client, helper
@@ -125,8 +125,6 @@ def get_events(project_id, session_id):
if e['source'] == "js_exception"][:500]
data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
session_id=session_id)
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
start_ts=s_data["startTs"], duration=s_data["duration"])
data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)

View file

@@ -26,12 +26,6 @@ def get_main_sessions_table(timestamp=0):
and timestamp and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
def get_main_resources_table(timestamp=0):
return "experimental.resources_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources"
def get_autocomplete_table(timestamp=0):
return "experimental.autocomplete"

View file

@@ -1 +1,6 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.21.0-ee';
DROP TABLE IF EXISTS experimental.resources_l7d_mv;
DROP TABLE IF EXISTS experimental.resources;

View file

@@ -90,32 +90,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.resources
(
session_id UInt64,
project_id UInt16,
datetime DateTime,
url String,
url_host String MATERIALIZED lower(domain(url)),
url_path String,
url_hostpath String MATERIALIZED concat(url_host, url_path),
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
nullIf(splitByChar('/', url_path)[-2], ''))),
duration Nullable(UInt16),
ttfb Nullable(UInt16),
header_size Nullable(UInt16),
encoded_body_size Nullable(UInt32),
decoded_body_size Nullable(UInt32),
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
success Nullable(UInt8) COMMENT 'currently available for type=img only',
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.sessions
(
@@ -289,33 +264,6 @@ SELECT session_id,
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, type, session_id, message_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT session_id,
project_id,
datetime,
url,
url_host,
url_path,
url_hostpath,
type,
name,
duration,
ttfb,
header_size,
encoded_body_size,
decoded_body_size,
compression_ratio,
success,
message_id,
_timestamp
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)

View file

@@ -35,6 +35,14 @@ CREATE TABLE IF NOT EXISTS public.session_integrations
PRIMARY KEY (session_id, project_id, provider)
);
ALTER TABLE IF EXISTS public.metrics
ALTER COLUMN user_id DROP NOT NULL,
ALTER COLUMN project_id SET NOT NULL;
ALTER TABLE IF EXISTS public.dashboards
ALTER COLUMN user_id DROP NOT NULL,
ALTER COLUMN project_id SET NOT NULL;
COMMIT;
\elif :is_next

View file

@@ -758,44 +758,6 @@ CREATE TABLE events.state_actions
CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
CREATE INDEX state_actions_timestamp_idx ON events.state_actions (timestamp);
CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
CREATE TABLE events.resources
(
session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
timestamp bigint NOT NULL,
duration bigint NULL,
type events.resource_type NOT NULL,
url text NOT NULL,
url_host text NOT NULL,
url_hostpath text NOT NULL,
success boolean NOT NULL,
status smallint NULL,
method events.resource_method NULL,
ttfb bigint NULL,
header_size bigint NULL,
encoded_body_size integer NULL,
decoded_body_size integer NULL,
PRIMARY KEY (session_id, message_id, timestamp)
);
CREATE INDEX resources_session_id_idx ON events.resources (session_id);
CREATE INDEX resources_status_idx ON events.resources (status);
CREATE INDEX resources_type_idx ON events.resources (type);
CREATE INDEX resources_url_host_idx ON events.resources (url_host);
CREATE INDEX resources_timestamp_idx ON events.resources (timestamp);
CREATE INDEX resources_success_idx ON events.resources (success);
CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script');
CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img';
CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id);
CREATE INDEX resources_timestamp_duration_durationgt0NN_idx ON events.resources (timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL;
CREATE TABLE events.performance
(
session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
@@ -875,8 +837,8 @@ CREATE INDEX jobs_project_id_idx ON public.jobs (project_id);
CREATE TABLE public.metrics
(
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES public.users (user_id) ON DELETE SET NULL,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer NULL REFERENCES public.users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
@@ -915,7 +877,7 @@ CREATE TABLE public.dashboards
(
dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES public.users (user_id) ON DELETE SET NULL,
user_id integer NULL REFERENCES public.users (user_id) ON DELETE SET NULL,
name text NOT NULL,
description text NOT NULL DEFAULT '',
is_public boolean NOT NULL DEFAULT TRUE,

View file

@@ -1 +1,56 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.20.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.20.0-ee';
CREATE TABLE IF NOT EXISTS experimental.resources
(
session_id UInt64,
project_id UInt16,
datetime DateTime,
url String,
url_host String MATERIALIZED lower(domain(url)),
url_path String,
url_hostpath String MATERIALIZED concat(url_host, url_path),
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
nullIf(splitByChar('/', url_path)[-2], ''))),
duration Nullable(UInt16),
ttfb Nullable(UInt16),
header_size Nullable(UInt16),
encoded_body_size Nullable(UInt32),
decoded_body_size Nullable(UInt32),
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
success Nullable(UInt8) COMMENT 'currently available for type=img only',
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, type, session_id, message_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT session_id,
project_id,
datetime,
url,
url_host,
url_path,
url_hostpath,
type,
name,
duration,
ttfb,
header_size,
encoded_body_size,
decoded_body_size,
compression_ratio,
success,
message_id,
_timestamp
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@@ -31,6 +31,46 @@ CREATE TABLE IF NOT EXISTS public.session_integrations
PRIMARY KEY (session_id, project_id, provider)
);
ALTER TABLE IF EXISTS public.metrics
ALTER COLUMN user_id DROP NOT NULL,
ALTER COLUMN project_id SET NOT NULL;
ALTER TABLE IF EXISTS public.dashboards
ALTER COLUMN user_id DROP NOT NULL,
ALTER COLUMN project_id SET NOT NULL;
DELETE
FROM public.metrics
WHERE metric_of IN ('avgCpu', 'avgDomContentLoaded',
'avgDomContentLoadStart', 'avgFirstContentfulPixel',
'avgFirstPaint',
'avgFps', 'avgImageLoadTime',
'avgPageLoadTime', 'avgRequestLoadTime',
'avgResponseTime', 'avgSessionDuration',
'avgTillFirstByte', 'avgTimeToRender')
or metric_of IN ('timeToRender', 'cpu', 'crashes',
'fps', 'avgTimeToInteractive',
'avgPagesResponseTime', 'avgUsedJsHeapSize',
'memoryConsumption', 'pagesResponseTime',
'pagesDomBuildtime', 'pagesResponseTimeDistribution',
'resourcesVsVisuallyComplete', 'sessionsPerBrowser',
'slowestDomains', 'speedLocation', 'impactedSessionsBySlowPages',
'avgPagesDomBuildtime')
or metric_of IN ('missingResources', 'resourcesLoadingTime',
'slowestResources', 'callsErrors','resourceTypeVsResponseEnd',
'resourcesCountByType');
DELETE
FROM public.alerts
WHERE query ->> 'left' IN ('performance.image_load_time.average', 'performance.request_load_time.average',
'resources.load_time.average', 'resources.missing.count',
'errors.4xx_5xx.count', 'errors.4xx.count',
'errors.5xx.count', 'errors.javascript.impacted_sessions.count');
DROP TABLE IF EXISTS events.resources;
DROP TYPE IF EXISTS events.resource_type;
DROP TYPE IF EXISTS events.resource_method;
COMMIT;
\elif :is_next

View file

@@ -719,44 +719,6 @@ CREATE TABLE events.state_actions
CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
CREATE INDEX state_actions_timestamp_idx ON events.state_actions (timestamp);
CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
CREATE TABLE events.resources
(
session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
timestamp bigint NOT NULL,
duration bigint NULL,
type events.resource_type NOT NULL,
url text NOT NULL,
url_host text NOT NULL,
url_hostpath text NOT NULL,
success boolean NOT NULL,
status smallint NULL,
method events.resource_method NULL,
ttfb bigint NULL,
header_size bigint NULL,
encoded_body_size integer NULL,
decoded_body_size integer NULL,
PRIMARY KEY (session_id, message_id, timestamp)
);
CREATE INDEX resources_session_id_idx ON events.resources (session_id);
CREATE INDEX resources_status_idx ON events.resources (status);
CREATE INDEX resources_type_idx ON events.resources (type);
CREATE INDEX resources_url_host_idx ON events.resources (url_host);
CREATE INDEX resources_timestamp_idx ON events.resources (timestamp);
CREATE INDEX resources_success_idx ON events.resources (success);
CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script');
CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img';
CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id);
CREATE INDEX resources_timestamp_duration_durationgt0NN_idx ON events.resources (timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL;
CREATE TABLE events.performance
(
session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
@@ -836,8 +798,8 @@ CREATE INDEX jobs_project_id_idx ON public.jobs (project_id);
CREATE TABLE public.metrics
(
metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES public.users (user_id) ON DELETE SET NULL,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer NULL REFERENCES public.users (user_id) ON DELETE SET NULL,
name text NOT NULL,
is_public boolean NOT NULL DEFAULT TRUE,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
@@ -876,7 +838,7 @@ CREATE TABLE public.dashboards
(
dashboard_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
user_id integer REFERENCES public.users (user_id) ON DELETE SET NULL,
user_id integer NULL REFERENCES public.users (user_id) ON DELETE SET NULL,
name text NOT NULL,
description text NOT NULL DEFAULT '',
is_public boolean NOT NULL DEFAULT TRUE,