Merge remote-tracking branch 'origin/api-v1.8.0' into dev

Taha Yassine Kraiem 2022-08-25 19:14:03 +01:00
commit 781c2da711
80 changed files with 7095 additions and 1764 deletions

View file

@@ -15,10 +15,12 @@ class ProjectAuthorizer:
if len(request.path_params.keys()) == 0 or request.path_params.get(self.project_identifier) is None:
return
current_user: schemas.CurrentContext = await OR_context(request)
project_identifier = request.path_params[self.project_identifier]
value = request.path_params[self.project_identifier]
if (self.project_identifier == "projectId" \
and projects.get_project(project_id=project_identifier, tenant_id=current_user.tenant_id) is None) \
or (self.project_identifier.lower() == "projectKey" \
and projects.get_internal_project_id(project_key=project_identifier) is None):
and not (isinstance(value, int) or isinstance(value, str) and value.isnumeric())
and projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None) \
or (self.project_identifier == "projectKey" \
and projects.get_internal_project_id(project_key=value) is None):
print("project not found")
print(value)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="project not found.")
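A condensed sketch of the new guard, mirroring the condition above; the two lookup callables are stand-ins for projects.get_project and projects.get_internal_project_id:

def should_reject(identifier_name, value, get_project, get_internal_project_id):
    # Mirrors the updated check: a projectId is rejected when it is not numeric
    # and no matching project exists; a projectKey is rejected when it resolves
    # to no internal project id.
    return ((identifier_name == "projectId"
             and not (isinstance(value, int) or isinstance(value, str) and value.isnumeric())
             and get_project(project_id=value) is None)
            or (identifier_name == "projectKey"
                and get_internal_project_id(project_key=value) is None))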

View file

@@ -0,0 +1,104 @@
import schemas
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.event_filter_definition import Event
TABLE = "public.autocomplete"
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
query = cur.mogrify(" UNION DISTINCT ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
try:
cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- VALUE -----------")
print(value)
print("--------------------")
raise err
results = helper.list_to_camel_case(cur.fetchall())
return results
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
cur.execute(cur.mogrify(query, params))
return helper.list_to_camel_case(cur.fetchall())
return f
def __generic_autocomplete_metas(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(__generic_query(typename, value_length=len(text)),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
cur.execute(query)
rows = cur.fetchall()
return rows
return f
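A hedged usage sketch of the new module (the sample project_id and search text are assumptions): inputs of two characters or fewer take the single prefix-only query with LIMIT 10, while longer inputs UNION DISTINCT a prefix match and a substring match, five rows each.

# Hypothetical usage of the helpers above, in the same module context.
click_event = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
search_clicks = __generic_autocomplete(click_event)
rows = search_clicks(project_id=1, value="checkout")  # [{"value": ..., "type": ...}, ...]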

View file

@@ -91,7 +91,7 @@ def __get_sessions_list(project_id, user_id, data):
data.series[0].filter.endDate = data.endTimestamp
data.series[0].filter.page = data.page
data.series[0].filter.limit = data.limit
return sessions.search2_pg(data=data.series[0].filter, project_id=project_id, user_id=user_id)
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
@@ -166,7 +166,7 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
s.filter.limit = data.limit
s.filter.page = data.page
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
@@ -213,7 +213,7 @@ def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadS
s.filter.limit = data.limit
s.filter.page = data.page
results.append({"seriesId": None, "seriesName": s.name,
**sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
return results
@@ -532,7 +532,7 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
"lostConversions": 0,
"unaffectedSessions": 0}
return {"seriesId": s.series_id, "seriesName": s.name,
"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter)
"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
issue=issue, data=s.filter)
if issue is not None else {"total": 0, "sessions": []},
"issue": issue}

View file

@@ -251,10 +251,7 @@ def get_details(project_id, error_id, user_id, **data):
parent_error_id,session_id, user_anonymous_id,
user_id, user_uuid, user_browser, user_browser_version,
user_os, user_os_version, user_device, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE pe.error_id = fe.error_id
AND fe.user_id = %(user_id)s), FALSE) AS favorite,
FALSE AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
@@ -424,10 +421,11 @@ def __get_sort_key(key):
}.get(key, 'max_datetime')
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {'total': 0,
'errors': []
}
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
empty_response = {
'total': 0,
'errors': []
}
platform = None
for f in data.filters:
@@ -449,17 +447,12 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.sort is not None:
@@ -488,9 +481,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
# if data.bookmarked:
# pg_sub_query.append("ufe.user_id = %(userId)s")
# extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
@@ -509,7 +502,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT COALESCE(user_id,user_uuid)) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
@@ -544,19 +537,13 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"count": total}
if total == 0:
rows = []
else:
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
"""SELECT error_id,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
@@ -574,26 +561,12 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
else:
r["status"] = "untracked"
r["parent_error_id"] = None
r["favorite"] = False
r["viewed"] = False
r["stack"] = None
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
or len(r["stack"]) > 0
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
'total': total - offset,
'total': total,
'errors': helper.list_to_camel_case(rows)
}
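One behavioral change worth noting: users is now COUNT(DISTINCT COALESCE(user_id, user_uuid)), so sessions carrying a known user_id collapse into a single user across devices, and only anonymous sessions fall back to the device uuid. A toy illustration of the same semantics:

# Toy rows illustrating COALESCE(user_id, user_uuid) counting.
rows = [
    {"user_id": "alice", "user_uuid": "uuid-1"},
    {"user_id": "alice", "user_uuid": "uuid-2"},  # same user, second device
    {"user_id": None, "user_uuid": "uuid-3"},     # anonymous: falls back to uuid
]
users = {r["user_id"] if r["user_id"] is not None else r["user_uuid"] for r in rows}
assert len(users) == 2  # COUNT(DISTINCT user_uuid) would have reported 3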

View file

@@ -0,0 +1,48 @@
from chalicelib.utils import pg_client
def add_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""INSERT INTO public.user_favorite_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": True}
def remove_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""DELETE FROM public.user_favorite_errors
WHERE
user_id = %(userId)s
AND error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": False}
def favorite_error(project_id, user_id, error_id):
exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id)
if not exists:
return {"errors": ["cannot bookmark non-rehydrated errors"]}
if favorite:
return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
def error_exists_and_favorite(user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT errors.error_id AS exists, ufe.error_id AS favorite
FROM public.errors
LEFT JOIN (SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s) AS ufe USING (error_id)
WHERE error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
r = cur.fetchone()
if r is None:
return False, False
return True, r.get("favorite") is not None
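A hedged usage sketch of the toggle (ids are hypothetical):

# favorite_error() flips the bookmark for an already-hydrated error.
state = favorite_error(project_id=1, user_id=7, error_id="err-123")
# -> {"errorId": "err-123", "favorite": True} on the first call,
#    {"errorId": "err-123", "favorite": False} on the next,
#    {"errors": [...]} if the error_id is absent from public.errors.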

View file

@@ -1,91 +0,0 @@
from chalicelib.utils import pg_client
def add_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_errors
(user_id, error_id)
VALUES
(%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": True}
def remove_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_errors
WHERE
user_id = %(userId)s
AND error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": False}
def favorite_error(project_id, user_id, error_id):
exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id)
if not exists:
return {"errors": ["cannot bookmark non-rehydrated errors"]}
if favorite:
return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
def error_exists_and_favorite(user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT errors.error_id AS exists, ufe.error_id AS favorite
FROM public.errors
LEFT JOIN (SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s) AS ufe USING (error_id)
WHERE error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
r = cur.fetchone()
if r is None:
return False, False
return True, r.get("favorite") is not None
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.user_viewed_errors
(user_id, error_id)
VALUES
(%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE ve.error_id = %(error_id)s
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
query=query
)
r = cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)

View file

@@ -0,0 +1,37 @@
from chalicelib.utils import pg_client
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE ve.error_id = %(error_id)s
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
query=query
)
r = cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
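The matching view-tracking flow; viewed_error() is effectively idempotent because viewed_error_exists() returns True both for already-viewed and for unknown error ids (ids below are hypothetical):

viewed_error(project_id=1, user_id=7, error_id="err-123")  # inserts the view row
viewed_error(project_id=1, user_id=7, error_id="err-123")  # no-op, returns None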

View file

@@ -1,10 +1,14 @@
import schemas
from chalicelib.core import issues
from chalicelib.core import sessions_metas, metadata
from chalicelib.core import metadata
from chalicelib.core import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
from chalicelib.core import autocomplete
def get_customs_by_sessionId2_pg(session_id, project_id):
with pg_client.PostgresClient() as cur:
@@ -92,11 +96,6 @@ def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
return rows
def __get_data_for_extend(data):
if "errors" not in data:
return data["data"]
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
@@ -110,7 +109,7 @@ def __pg_errors_query(source=None, value_length=None):
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
@@ -122,7 +121,7 @@ def __pg_errors_query(source=None, value_length=None):
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
UNION DISTINCT
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
@@ -134,7 +133,7 @@ def __pg_errors_query(source=None, value_length=None):
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
@@ -157,7 +156,7 @@ def __pg_errors_query(source=None, value_length=None):
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
UNION DISTINCT
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
@@ -177,8 +176,7 @@ def __search_pg_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source,
value_length=len(value) \
if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None),
value_length=len(value)),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
@@ -189,7 +187,7 @@ def __search_pg_errors(project_id, value, key=None, source=None):
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2:
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
@@ -268,7 +266,7 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2:
if len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
@@ -294,48 +292,6 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
return results
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
__generic_query(event.ui_type,
value_length=len(value) \
if SUPPORTED_TYPES[event.ui_type].change_by_length \
else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
return helper.list_to_camel_case(cur.fetchall())
return f
class event_type:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
@@ -358,99 +314,65 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
query=__generic_query(typename=event_type.CLICK.ui_type),
change_by_length=True),
event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
query=__generic_query(typename=event_type.INPUT.ui_type),
change_by_length=True),
event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
query=__generic_query(typename=event_type.LOCATION.ui_type),
change_by_length=True),
event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
query=__generic_query(typename=event_type.CUSTOM.ui_type),
change_by_length=True),
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
query=__generic_query(typename=event_type.REQUEST.ui_type),
change_by_length=True),
event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
query=__generic_query(typename=event_type.GRAPHQL.ui_type),
change_by_length=True),
event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
query=__generic_query(typename=event_type.STATEACTION.ui_type),
change_by_length=True),
event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
query=autocomplete.__generic_query(
typename=event_type.LOCATION.ui_type)),
event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
query=autocomplete.__generic_query(
typename=event_type.REQUEST.ui_type)),
event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
query=autocomplete.__generic_query(
typename=event_type.GRAPHQL.ui_type)),
event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
query=autocomplete.__generic_query(
typename=event_type.STATEACTION.ui_type)),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None, change_by_length=True),
query=None),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None, change_by_length=True),
query=None),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
change_by_length=True),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
change_by_length=True),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
change_by_length=True),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
change_by_length=True),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
change_by_length=True),
event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
query=autocomplete.__generic_query(
typename=event_type.CLICK_IOS.ui_type)),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
query=autocomplete.__generic_query(
typename=event_type.INPUT_IOS.ui_type)),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
query=autocomplete.__generic_query(
typename=event_type.VIEW_IOS.ui_type)),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
query=autocomplete.__generic_query(
typename=event_type.CUSTOM_IOS.ui_type)),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
query=autocomplete.__generic_query(
typename=event_type.REQUEST_IOS.ui_type)),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None, change_by_length=True),
query=None),
}
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
LIMIT 5)""")
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(" UNION ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
try:
cur.execute(query)
except Exception as err:
print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- VALUE -----------")
print(value)
print("--------------------")
raise err
results = helper.list_to_camel_case(cur.fetchall())
return results
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
@@ -470,16 +392,3 @@ def search(text, event_type, project_id, source, key):
return {"errors": ["unsupported event"]}
return {"data": rows}
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
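With change_by_length gone, every generic filter now goes through autocomplete.__generic_query, which already switches query shape on value_length. A reduced sketch of the registration pattern used in SUPPORTED_TYPES above, assuming SupportedFilter now takes just get and query:

# Reduced sketch of the new registration style.
def register(event):
    return SupportedFilter(get=autocomplete.__generic_autocomplete(event),
                           query=autocomplete.__generic_query(typename=event.ui_type))

supported = {e.ui_type: register(e) for e in (event_type.CLICK, event_type.INPUT)}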

View file

@@ -138,8 +138,8 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
counts = sessions.search2_pg(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
project_id=project_id, user_id=None, count_only=True)
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
project_id=project_id, user_id=None, count_only=True)
row["sessionsCount"] = counts["countSessions"]
row["usersCount"] = counts["countUsers"]
filter_clone = dict(row["filter"])
@@ -193,8 +193,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
return sessions.search2_pg(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), project_id=project_id,
user_id=user_id)
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), project_id=project_id,
user_id=user_id)
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
@@ -207,8 +207,8 @@ def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.Funnel
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
start_date=data.startDate, end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
return sessions.search2_pg(data=data, project_id=project_id,
user_id=user_id)
return sessions.search_sessions(data=data, project_id=project_id,
user_id=user_id)
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
@@ -365,8 +365,8 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
if i.get("issueId", "") == issue_id:
issue = i
break
return {"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, issue=issue,
data=data) if issue is not None else {"total": 0, "sessions": []},
return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
data=data) if issue is not None else {"total": 0, "sessions": []},
# "stages": helper.list_to_camel_case(insights),
# "totalDropDueToIssues": total_drop_due_to_issues,
"issue": issue}

View file

@@ -765,8 +765,8 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args)
pg_sub_query.append("resources.success = FALSE")
pg_sub_query_chart.append("resources.success = FALSE")
pg_sub_query.append("resources.type != 'fetch'")
pg_sub_query_chart.append("resources.type != 'fetch'")
pg_sub_query.append("resources.type = 'img'")
pg_sub_query_chart.append("resources.type = 'img'")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT
@@ -1580,27 +1580,27 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="resources", time_column="timestamp", project=False,
data=args, main_table="requests", time_column="timestamp", project=False,
duration=False)
pg_sub_query_subset.append("resources.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("resources.timestamp<%(endTimestamp)s")
pg_sub_query_subset.append("resources.status/100 = %(status_code)s")
pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("requests.timestamp<%(endTimestamp)s")
pg_sub_query_subset.append("requests.status/100 = %(status_code)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS(SELECT resources.url_host, timestamp
FROM events.resources INNER JOIN public.sessions USING (session_id)
pg_query = f"""WITH requests AS(SELECT requests.host, timestamp
FROM events_common.requests INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(JSONB_AGG(resources) FILTER ( WHERE resources IS NOT NULL ), '[]'::JSONB) AS keys
COALESCE(JSONB_AGG(requests) FILTER ( WHERE requests IS NOT NULL ), '[]'::JSONB) AS keys
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT resources.url_host, COUNT(resources.*) AS count
FROM resources
LEFT JOIN LATERAL ( SELECT requests.host, COUNT(*) AS count
FROM requests
WHERE {" AND ".join(pg_sub_query_chart)}
GROUP BY url_host
GROUP BY host
ORDER BY count DESC
LIMIT 5
) AS resources ON (TRUE)
) AS requests ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"project_id": project_id,
@@ -1625,37 +1625,37 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return result
def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="resources", time_column="timestamp", project=False,
data=args, main_table="requests", time_column="timestamp", project=False,
duration=False)
pg_sub_query_subset.append("resources.status/100 = %(status_code)s")
pg_sub_query_subset.append("requests.status/100 = %(status_code)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (SELECT resources.url_host, timestamp
FROM events.resources INNER JOIN public.sessions USING (session_id)
pg_query = f"""WITH requests AS (SELECT host, timestamp
FROM events_common.requests INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(JSONB_AGG(resources) FILTER ( WHERE resources IS NOT NULL ), '[]'::JSONB) AS keys
COALESCE(JSONB_AGG(requests) FILTER ( WHERE requests IS NOT NULL ), '[]'::JSONB) AS keys
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT resources.url_host, COUNT(resources.url_host) AS count
FROM resources
LEFT JOIN LATERAL ( SELECT requests.host, COUNT(*) AS count
FROM requests
WHERE {" AND ".join(pg_sub_query_chart)}
GROUP BY url_host
GROUP BY host
ORDER BY count DESC
LIMIT 5
) AS resources ON (TRUE)
) AS requests ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 4, **__get_constraint_values(args)}
"status_code": status, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows)
@@ -1665,44 +1665,16 @@ def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
return rows
def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
return __get_domains_errors_4xx_and_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp, density=density, **args)
def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
data=args, main_table="resources", time_column="timestamp", project=False,
duration=False)
pg_sub_query_subset.append("resources.status/100 = %(status_code)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (SELECT resources.url_host, timestamp
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
COALESCE(JSONB_AGG(resources) FILTER ( WHERE resources IS NOT NULL ), '[]'::JSONB) AS keys
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT resources.url_host, COUNT(resources.url_host) AS count
FROM resources
WHERE {" AND ".join(pg_sub_query_chart)}
GROUP BY url_host
ORDER BY count DESC
LIMIT 5
) AS resources ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 5, **__get_constraint_values(args)}
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __nested_array_to_dict_array(rows)
neutral = __get_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
return rows
return __get_domains_errors_4xx_and_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp, density=density, **args)
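The 4xx and 5xx chart builders now share a single status-parametrized helper, and the public functions become one-line wrappers; the same consolidation is applied to get_calls_errors_4xx/5xx further down. A reduced sketch of the pattern, with run_chart_query as a hypothetical stand-in for the shared query body:

def __errors_by_status_class(status, project_id, **args):
    # requests.status/100 = %(status_code)s selects the whole status class (4 or 5).
    return run_chart_query(project_id=project_id, status_code=status, **args)

def errors_4xx(project_id, **args):
    return __errors_by_status_class(4, project_id, **args)

def errors_5xx(project_id, **args):
    return __errors_by_status_class(5, project_id, **args)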
def __nested_array_to_dict_array(rows, key="url_host", value="count"):
@@ -1747,15 +1719,15 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.success = FALSE")
pg_sub_query.append("requests.success = FALSE")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT
resources.url_host AS domain,
COUNT(resources.session_id) AS errors_count
FROM events.resources INNER JOIN sessions USING (session_id)
requests.host AS domain,
COUNT(requests.session_id) AS errors_count
FROM events_common.requests INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.url_host
GROUP BY requests.host
ORDER BY errors_count DESC
LIMIT 5;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
@@ -1823,7 +1795,7 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY (4 + 5), 3 DESC
ORDER BY (4 + 5) DESC, 3 DESC
LIMIT 50;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
@@ -1832,50 +1804,45 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
return helper.list_to_camel_case(rows)
def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.type = 'fetch'")
pg_sub_query.append("resources.method IS NOT NULL")
pg_sub_query.append("resources.status/100 = 4")
pg_sub_query.append("requests.type = 'fetch'")
pg_sub_query.append("requests.method IS NOT NULL")
pg_sub_query.append(f"requests.status/100 = {status}")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
FROM events.resources INNER JOIN sessions USING (session_id)
pg_query = f"""SELECT requests.method,
requests.host,
requests.path,
COUNT(requests.session_id) AS all_requests
FROM events_common.requests INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.method, resources.url_hostpath
GROUP BY requests.method, requests.host, requests.path
ORDER BY all_requests DESC
LIMIT 10;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
rows = cur.fetchall()
for r in rows:
r["url_hostpath"] = r.pop("host") + r.pop("path")
return helper.list_to_camel_case(rows)
def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
return __get_calls_errors_4xx_or_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args)
pg_sub_query.append("resources.type = 'fetch'")
pg_sub_query.append("resources.method IS NOT NULL")
pg_sub_query.append("resources.status/100 = 5")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
FROM events.resources INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY all_requests DESC
LIMIT 10;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}))
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
return __get_calls_errors_4xx_or_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
@@ -1883,10 +1850,9 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args)
pg_sub_query_subset.append("resources.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("resources.timestamp<%(endTimestamp)s")
pg_sub_query_subset.append("resources.type != 'fetch'")
pg_sub_query_subset.append("resources.status > 200")
pg_sub_query_subset.append("requests.timestamp>=%(startTimestamp)s")
pg_sub_query_subset.append("requests.timestamp<%(endTimestamp)s")
pg_sub_query_subset.append("requests.status_code > 200")
pg_sub_query_subset_e = __get_constraints(project_id=project_id, data=args, duration=False, main_table="m_errors",
time_constraint=False)
@@ -1897,8 +1863,8 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
pg_sub_query_subset_e.append("timestamp<%(endTimestamp)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (SELECT status, timestamp
FROM events.resources
pg_query = f"""WITH requests AS (SELECT status_code AS status, timestamp
FROM events_common.requests
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
),
@@ -1927,7 +1893,7 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
), 0) AS integrations
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT status
FROM resources
FROM requests
WHERE {" AND ".join(pg_sub_query_chart)}
) AS errors_partition ON (TRUE)
GROUP BY timestamp
@@ -2169,44 +2135,44 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, project=False,
chart=True, data=args, main_table="resources", time_column="timestamp",
chart=True, data=args, main_table="requests", time_column="timestamp",
duration=False)
pg_sub_query_subset.append("resources.timestamp >= %(startTimestamp)s")
pg_sub_query_subset.append("resources.timestamp < %(endTimestamp)s")
pg_sub_query_subset.append("resources.success = FALSE")
pg_sub_query_subset.append("requests.timestamp >= %(startTimestamp)s")
pg_sub_query_subset.append("requests.timestamp < %(endTimestamp)s")
# pg_sub_query_subset.append("resources.type IN ('fetch', 'script')")
pg_sub_query_subset.append("requests.success = FALSE")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (
SELECT resources.url_host, timestamp
FROM events.resources
pg_query = f"""WITH requests AS (
SELECT requests.host, timestamp
FROM events_common.requests
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
)
SELECT generated_timestamp AS timestamp,
SUM(CASE WHEN first.url_host = sub_resources.url_host THEN 1 ELSE 0 END) AS first_party,
SUM(CASE WHEN first.url_host != sub_resources.url_host THEN 1 ELSE 0 END) AS third_party
SUM(CASE WHEN first.host = sub_requests.host THEN 1 ELSE 0 END) AS first_party,
SUM(CASE WHEN first.host != sub_requests.host THEN 1 ELSE 0 END) AS third_party
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN (
SELECT resources.url_host,
COUNT(resources.session_id) AS count
FROM events.resources
SELECT requests.host,
COUNT(requests.session_id) AS count
FROM events_common.requests
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = '1'
AND resources.type IN ('fetch', 'script')
AND sessions.start_ts > (EXTRACT(EPOCH FROM now() - INTERVAL '31 days') * 1000)::BIGINT
AND sessions.start_ts < (EXTRACT(EPOCH FROM now()) * 1000)::BIGINT
AND resources.timestamp > (EXTRACT(EPOCH FROM now() - INTERVAL '31 days') * 1000)::BIGINT
AND resources.timestamp < (EXTRACT(EPOCH FROM now()) * 1000)::BIGINT
AND requests.timestamp > (EXTRACT(EPOCH FROM now() - INTERVAL '31 days') * 1000)::BIGINT
AND requests.timestamp < (EXTRACT(EPOCH FROM now()) * 1000)::BIGINT
AND sessions.duration>0
GROUP BY resources.url_host
GROUP BY requests.host
ORDER BY count DESC
LIMIT 1
) AS first ON (TRUE)
LEFT JOIN LATERAL (
SELECT resources.url_host
FROM resources
SELECT requests.host
FROM requests
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sub_resources ON (TRUE)
) AS sub_requests ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
cur.execute(cur.mogrify(pg_query, {"step_size": step_size,

View file

@@ -43,25 +43,53 @@ def __create(tenant_id, name):
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False):
with pg_client.PostgresClient() as cur:
recorded_q = ""
extra_projection = ""
extra_join = ""
if gdpr:
extra_projection += ',s.gdpr'
if recorded:
recorded_q = """, COALESCE((SELECT TRUE
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM s.created_at) * 1000 - 24 * 60 * 60 * 1000)
AND sessions.start_ts <= %(now)s
LIMIT 1), FALSE) AS recorded"""
query = cur.mogrify(f"""SELECT
s.project_id, s.name, s.project_key, s.save_request_payloads
{',s.gdpr' if gdpr else ''}
{recorded_q}
{',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
extra_projection += """, COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT,
(SELECT MIN(sessions.start_ts)
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM
COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-24*60*60*1000)
AND sessions.start_ts <= %(now)s
LIMIT 1), NULL) AS first_recorded"""
if stack_integrations:
extra_projection += ',stack_integrations.count>0 AS stack_integrations'
if stack_integrations:
extra_join = """LEFT JOIN LATERAL (SELECT COUNT(*) AS count
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at
{extra_projection}
FROM public.projects AS s
{'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
{extra_join}
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;""", {"now": TimeUTC.now()})
ORDER BY s.project_id {") AS raw" if recorded else ""};""", {"now": TimeUTC.now()})
cur.execute(query)
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
if recorded:
for r in rows:
if r["first_recorded_session_at"] is None:
extra_update = ""
if r["recorded"]:
extra_update = ", first_recorded_session_at=to_timestamp(%(first_recorded)s/1000)"
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc')
{extra_update}
WHERE project_id=%(project_id)s""",
{"project_id": r["project_id"], "first_recorded": r["first_recorded"]})
cur.execute(query)
r.pop("first_recorded_session_at")
r.pop("first_recorded")
if recording_state:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
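The recorded flag is now computed once and persisted: when first_recorded_session_at is NULL, the query derives first_recorded, and a follow-up UPDATE stores it together with sessions_last_check_at so later calls skip the sessions scan. A condensed sketch of that per-row decision, using the names from the diff:

def persist_recorded_state(cur, row):
    if row["first_recorded_session_at"] is not None:
        return  # already cached in public.projects
    extra_update = ""
    if row["recorded"]:  # a first session was found in the lookback window
        extra_update = ", first_recorded_session_at=to_timestamp(%(first_recorded)s/1000)"
    cur.execute(cur.mogrify(f"""UPDATE public.projects
                                SET sessions_last_check_at=(now() at time zone 'utc'){extra_update}
                                WHERE project_id=%(project_id)s;""",
                            {"project_id": row["project_id"], "first_recorded": row["first_recorded"]}))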

View file

@@ -2,7 +2,7 @@ from typing import List
import schemas
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event
sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite
from chalicelib.utils import pg_client, helper, metrics_helper
SESSION_PROJECTION_COLS = """s.project_id,
@@ -172,8 +172,12 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
# This function executes the query and returns the result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
if data.bookmarked:
data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id)
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
@@ -187,16 +191,12 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
meta_keys = []
with pg_client.PostgresClient() as cur:
if errors_only:
main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, ser.status, ser.parent_error_id, ser.payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s), FALSE) AS favorite,
COALESCE((SELECT TRUE
main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE er.error_id = ve.error_id
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
{query_part};""", full_args)
{query_part};""", full_args)
elif count_only:
main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions,
@@ -401,6 +401,7 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
event.filters is None or len(event.filters) == 0))
# This function generates the query and returns it together with the dict of query arguments
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
@@ -717,7 +718,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
if event.source[0] not in [None, "*", ""]:
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
@@ -989,13 +990,13 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
extra_constraints.append("ser.project_id = %(project_id)s")
if error_status != schemas.ErrorStatus.all:
extra_constraints.append("ser.status = %(error_status)s")
full_args["error_status"] = error_status
if favorite_only:
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(userId)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
# if error_status != schemas.ErrorStatus.all:
# extra_constraints.append("ser.status = %(error_status)s")
# full_args["error_status"] = error_status
# if favorite_only:
# extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
# extra_constraints.append("ufe.user_id = %(userId)s")
if favorite_only and not errors_only and user_id is not None:
extra_from += """INNER JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions

View file

@@ -6,10 +6,8 @@ def add_favorite_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s);""",
INSERT INTO public.user_favorite_sessions(user_id, session_id)
VALUES (%(userId)s,%(sessionId)s);""",
{"userId": user_id, "sessionId": session_id})
)
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
@@ -21,8 +19,7 @@ def remove_favorite_session(project_id, user_id, session_id):
cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
WHERE user_id = %(userId)s
AND session_id = %(sessionId)s;""",
{"userId": user_id, "sessionId": session_id})
)
@@ -30,19 +27,6 @@ def remove_favorite_session(project_id, user_id, session_id):
include_fav_viewed=True)
def add_viewed_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.user_viewed_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
def favorite_session(project_id, user_id, session_id):
if favorite_session_exists(user_id=user_id, session_id=session_id):
return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
@@ -50,16 +34,11 @@ def favorite_session(project_id, user_id, session_id):
return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
def view_session(project_id, user_id, session_id):
return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id)
def favorite_session_exists(user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT
session_id
"""SELECT session_id
FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
@@ -68,3 +47,18 @@ def favorite_session_exists(user_id, session_id):
)
r = cur.fetchone()
return r is not None
def get_start_end_timestamp(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
WHERE
user_favorite_sessions.user_id = %(userId)s
AND project_id = %(project_id)s;""",
{"userId": user_id, "project_id": project_id})
)
r = cur.fetchone()
return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])
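A hedged usage sketch (ids hypothetical): the function yields the earliest and latest start_ts among the user's favorite sessions in the project, or (0, 0) when there are none, which is the window search_sessions applies when data.bookmarked is set.

start_ts, end_ts = get_start_end_timestamp(project_id=1, user_id=7)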

View file

@@ -1,206 +1,66 @@
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.core import autocomplete
from chalicelib.utils.event_filter_definition import SupportedFilter
def get_key_values(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT ARRAY_AGG(DISTINCT s.user_os
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='web') AS {schemas.FilterType.user_os},
ARRAY_AGG(DISTINCT s.user_browser
ORDER BY s.user_browser)
FILTER ( WHERE s.user_browser IS NOT NULL AND s.platform='web') AS {schemas.FilterType.user_browser},
ARRAY_AGG(DISTINCT s.user_device
ORDER BY s.user_device)
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='web') AS {schemas.FilterType.user_device},
ARRAY_AGG(DISTINCT s.user_country
ORDER BY s.user_country)
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='web')::text[] AS {schemas.FilterType.user_country},
ARRAY_AGG(DISTINCT s.user_id
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='web') AS {schemas.FilterType.user_id},
ARRAY_AGG(DISTINCT s.user_anonymous_id
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='web') AS {schemas.FilterType.user_anonymous_id},
ARRAY_AGG(DISTINCT s.rev_id
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='web') AS {schemas.FilterType.rev_id},
ARRAY_AGG(DISTINCT p.referrer
ORDER BY p.referrer)
FILTER ( WHERE p.referrer != '' ) AS {schemas.FilterType.referrer},
ARRAY_AGG(DISTINCT s.utm_source
ORDER BY s.utm_source) FILTER ( WHERE s.utm_source IS NOT NULL AND s.utm_source != 'none' AND s.utm_source != '') AS {schemas.FilterType.utm_source},
ARRAY_AGG(DISTINCT s.utm_medium
ORDER BY s.utm_medium) FILTER ( WHERE s.utm_medium IS NOT NULL AND s.utm_medium != 'none' AND s.utm_medium != '') AS {schemas.FilterType.utm_medium},
ARRAY_AGG(DISTINCT s.utm_campaign
ORDER BY s.utm_campaign) FILTER ( WHERE s.utm_campaign IS NOT NULL AND s.utm_campaign != 'none' AND s.utm_campaign != '') AS {schemas.FilterType.utm_campaign},
ARRAY_AGG(DISTINCT s.user_os
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='ios' ) AS {schemas.FilterType.user_os_ios},
ARRAY_AGG(DISTINCT s.user_device
ORDER BY s.user_device)
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='ios') AS {schemas.FilterType.user_device_ios},
ARRAY_AGG(DISTINCT s.user_country
ORDER BY s.user_country)
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='ios')::text[] AS {schemas.FilterType.user_country_ios},
ARRAY_AGG(DISTINCT s.user_id
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='ios') AS {schemas.FilterType.user_id_ios},
ARRAY_AGG(DISTINCT s.user_anonymous_id
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='ios') AS {schemas.FilterType.user_anonymous_id_ios},
ARRAY_AGG(DISTINCT s.rev_id
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='ios') AS {schemas.FilterType.rev_id_ios}
FROM public.sessions AS s
LEFT JOIN events.pages AS p USING (session_id)
WHERE s.project_id = %(site_id)s;""",
{"site_id": project_id}
)
)
row = cur.fetchone()
for k in row.keys():
if row[k] is None:
row[k] = []
elif len(row[k]) > 500:
row[k] = row[k][:500]
return helper.dict_to_CAPITAL_keys(row)
def get_top_key_values(project_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT {",".join([f"ARRAY((SELECT value FROM public.autocomplete WHERE project_id = %(site_id)s AND type='{k}' GROUP BY value ORDER BY COUNT(*) DESC LIMIT %(limit)s)) AS {k}" for k in SUPPORTED_TYPES.keys()])};""",
{"site_id": project_id, "limit": 5}
)
)
row = cur.fetchone()
return helper.dict_to_CAPITAL_keys(row)
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f""" (SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
return f""" SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(__generic_query(typename,
value_length=len(text) \
if SUPPORTED_TYPES[typename].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
cur.execute(query)
rows = cur.fetchall()
return rows
return f
SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_ios)),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
get=autocomplete.__generic_autocomplete_metas(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
change_by_length=True),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_ios)),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_ios)),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_ios)),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_ios)),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
change_by_length=True),
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_ios)),
}
def search(text, meta_type, project_id):
def search(text: str, meta_type: schemas.FilterType, project_id: int):
rows = []
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}

View file

@ -0,0 +1,11 @@
from chalicelib.utils import pg_client
def view_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_sessions(user_id, session_id)
VALUES (%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
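Because of ON CONFLICT DO NOTHING the insert is idempotent, so repeat views are safe to record; a sketch with hypothetical ids:
view_session(project_id=1, user_id=7, session_id=42)  # records the (user, session) pair
view_session(project_id=1, user_id=7, session_id=42)  # no-op: the conflicting row is ignored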

View file

@ -559,8 +559,8 @@ def get_top_insights(filter_d, project_id):
"dropDueToIssues": 0
}]
counts = sessions.search2_pg(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), project_id=project_id,
user_id=None, count_only=True)
counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), project_id=project_id,
user_id=None, count_only=True)
output[0]["sessionsCount"] = counts["countSessions"]
output[0]["usersCount"] = counts["countUsers"]
return output, 0

View file

@ -45,7 +45,7 @@ def create_step1(data: schemas.UserSignupSchema):
print("Verifying company's name validity")
company_name = data.organizationName
if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name):
if company_name is None or len(company_name) < 1:
errors.append("invalid organization's name")
print("Verifying project's name validity")

View file

@ -181,7 +181,7 @@ def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
if user:
return {"errors": ["user already exists"]}
name = data.get("name", None)
if name is not None and not helper.is_alphabet_latin_space(name):
if name is not None and len(name) == 0:
return {"errors": ["invalid user name"]}
if name is None:
name = data["email"]

View file

@ -6,7 +6,6 @@ class Event:
class SupportedFilter:
def __init__(self, get, query, change_by_length):
def __init__(self, get, query):
self.get = get
self.query = query
self.change_by_length = change_by_length

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.10
boto3==1.24.26
boto3==1.24.53
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.3.1
elasticsearch==8.3.3
jira==3.3.1
fastapi==0.78.0
fastapi==0.80.0
uvicorn[standard]==0.18.2
python-decouple==3.6
pydantic[email]==1.9.1
pydantic[email]==1.9.2
apscheduler==3.9.1

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.10
boto3==1.24.26
boto3==1.24.53
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.3.1
elasticsearch==8.3.3
jira==3.3.1
fastapi==0.78.0
fastapi==0.80.0
uvicorn[standard]==0.18.2
python-decouple==3.6
pydantic[email]==1.9.1
pydantic[email]==1.9.2
apscheduler==3.9.1

View file

@ -1,4 +1,4 @@
from typing import Union, Optional
from typing import Union
from decouple import config
from fastapi import Depends, Body, BackgroundTasks, HTTPException
@ -7,13 +7,13 @@ from starlette import status
import schemas
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
sessions_metas, alerts, funnels, issues, integrations_manager, metadata, \
alerts, funnels, issues, integrations_manager, metadata, \
log_tool_elasticsearch, log_tool_datadog, \
log_tool_stackdriver, reset_password, sessions_favorite_viewed, \
log_tool_stackdriver, reset_password, sessions_favorite, \
log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, users, \
custom_metrics, saved_search, integrations_global
assist, heatmaps, mobile, signup, tenants, errors_viewed, boarding, notifications, webhook, users, \
custom_metrics, saved_search, integrations_global, sessions_viewed, errors_favorite
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper, helper, captcha
from chalicelib.utils.TimeUTC import TimeUTC
@ -51,6 +51,14 @@ def login(data: schemas.UserLoginSchema = Body(...)):
}
@app.post('/{projectId}/sessions/search', tags=["sessions"])
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
@ -62,7 +70,7 @@ def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: B
if data is None:
return {"errors": ["session not found"]}
if data.get("inDB"):
background_tasks.add_task(sessions_favorite_viewed.view_session, project_id=projectId, user_id=context.user_id,
background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id,
session_id=sessionId)
return {
'data': data
@ -74,8 +82,8 @@ def get_session2(projectId: int, sessionId: Union[int, str], background_tasks: B
def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": sessions_favorite_viewed.favorite_session(project_id=projectId, user_id=context.user_id,
session_id=sessionId)}
"data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id,
session_id=sessionId)}
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])
@ -164,23 +172,6 @@ def events_search(projectId: int, q: str,
return result
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@app.get('/{projectId}/sessions/filters', tags=["sessions"])
def session_filter_values(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {'data': sessions_metas.get_key_values(projectId)}
@app.get('/{projectId}/sessions/filters/top', tags=["sessions"])
def session_top_filter_values(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {'data': sessions_metas.get_top_key_values(projectId)}
@app.get('/{projectId}/integrations', tags=["integrations"])
def get_integrations_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = integrations_global.get_global_integrations_status(tenant_id=context.tenant_id,
@ -909,7 +900,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
if data is None:
return {"errors": ["session not found"]}
if data.get("inDB"):
background_tasks.add_task(sessions_favorite_viewed.view_session, project_id=projectId,
background_tasks.add_task(sessions_viewed.view_session, project_id=projectId,
user_id=context.user_id, session_id=sessionId)
return {'data': data}
@ -995,7 +986,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
if data.get("data") is not None:
background_tasks.add_task(errors_favorite_viewed.viewed_error, project_id=projectId, user_id=context.user_id,
background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id,
error_id=errorId)
return data
@ -1024,7 +1015,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite_viewed.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
elif action == "sessions":
start_date = startDate
end_date = endDate

View file

@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse
import schemas
from chalicelib.core import integrations_manager
from chalicelib.core import sessions
from chalicelib.core import tenants, users, metadata, projects, license
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import helper

View file

@ -554,13 +554,15 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
assert values.get("sourceOperator") is not None, \
"sourceOperator should not be null for PerformanceEventType"
if values["type"] == PerformanceEventType.time_between_events:
assert values["sourceOperator"] != MathOperator._equal.value, \
f"{MathOperator._equal} is not allowed for duration of {PerformanceEventType.time_between_events}"
assert len(values.get("value", [])) == 2, \
f"must provide 2 Events as value for {PerformanceEventType.time_between_events}"
assert isinstance(values["value"][0], _SessionSearchEventRaw) \
and isinstance(values["value"][1], _SessionSearchEventRaw), \
f"event should be of type _SessionSearchEventRaw for {PerformanceEventType.time_between_events}"
assert len(values["source"]) > 0 and isinstance(values["source"][0], int), \
f"source of type int if required for {PerformanceEventType.time_between_events}"
f"source of type int is required for {PerformanceEventType.time_between_events}"
else:
assert "source" in values, f"source is required for {values.get('type')}"
assert isinstance(values["source"], list), f"source of type list is required for {values.get('type')}"
@ -736,7 +738,7 @@ class ErrorSort(str, Enum):
sessions_count = 'sessions'
class SearchErrorsSchema(SessionsSearchPayloadSchema):
class SearchErrorsSchema(FlatSessionsSearchPayloadSchema):
sort: ErrorSort = Field(default=ErrorSort.occurrence)
density: Optional[int] = Field(7)
status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)
@ -768,7 +770,7 @@ class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema, SearchErrorsSchema):
class CustomMetricSeriesFilterSchema(SearchErrorsSchema):
startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)

ee/api/.gitignore
View file

@ -177,11 +177,15 @@ chalicelib/saas
README/*
Pipfile
.local/*
/chalicelib/core/alerts.py
/chalicelib/core/alerts_processor.py
/chalicelib/core/announcements.py
/chalicelib/core/autocomplete.py
/chalicelib/core/collaboration_slack.py
/chalicelib/core/errors_favorite_viewed.py
/chalicelib/core/errors.py
/chalicelib/core/errors_favorite.py
/chalicelib/core/events.py
/chalicelib/core/events_ios.py
/chalicelib/core/funnels.py
@ -257,4 +261,4 @@ Pipfile
/build_alerts.sh
/routers/subs/metrics.py
/routers/subs/v1_api.py
/chalicelib/core/dashboards.py
/chalicelib/core/dashboards.py

View file

@ -15,13 +15,15 @@ class ProjectAuthorizer:
if len(request.path_params.keys()) == 0 or request.path_params.get(self.project_identifier) is None:
return
current_user: schemas.CurrentContext = await OR_context(request)
project_identifier = request.path_params[self.project_identifier]
value = request.path_params[self.project_identifier]
user_id = current_user.user_id if request.state.authorizer_identity == "jwt" else None
if (self.project_identifier == "projectId" \
and not projects.is_authorized(project_id=project_identifier, tenant_id=current_user.tenant_id,
and not projects.is_authorized(project_id=value, tenant_id=current_user.tenant_id,
user_id=user_id)) \
or (self.project_identifier.lower() == "projectKey" \
and not projects.is_authorized(project_id=projects.get_internal_project_id(project_identifier),
tenant_id=current_user.tenant_id, user_id=user_id)):
or (self.project_identifier == "projectKey" \
and not projects.is_authorized(
project_id=projects.get_internal_project_id(value),
tenant_id=current_user.tenant_id, user_id=user_id)):
print("unauthorized project")
print(value)
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="unauthorized project.")

View file

@ -0,0 +1,28 @@
from decouple import config
import logging
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
print(">>> Using experimental sessions search")
from . import sessions_exp as sessions
else:
from . import sessions as sessions
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
print(">>> Using experimental autocomplete")
from . import autocomplete_exp as autocomplete
else:
from . import autocomplete as autocomplete
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
print(">>> Using experimental error search")
from . import errors_exp as errors
else:
from . import errors as errors
if config("EXP_METRICS", cast=bool, default=False):
print(">>> Using experimental metrics")
from . import metrics_exp as metrics
else:
from . import metrics as metrics
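The aliasing is resolved once at import time, so callers never branch on the flags themselves; a consumer-side sketch (environment values and payload are hypothetical):
# process environment (e.g. a .env file read by python-decouple):
#   EXP_SESSIONS_SEARCH=true
#   EXP_ERRORS_SEARCH=false
from chalicelib.core import sessions, errors  # experimental or stable, picked above
results = sessions.search_sessions(data=payload, project_id=1, user_id=7)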

View file

@ -0,0 +1,107 @@
import schemas
from chalicelib.utils import ch_client
from chalicelib.utils import helper
from chalicelib.utils.event_filter_definition import Event
TABLE = "final.autocomplete"
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM {TABLE}
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)""")
with ch_client.ClickHouseClient() as cur:
query = " UNION DISTINCT ".join(sub_queries) + ";"
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
results = []
try:
results = cur.execute(query=query, params=params)
except Exception as err:
print("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
print(cur.format(query=query, params=params))
print("--------- PARAMS -----------")
print(params)
print("--------- VALUE -----------")
print(value)
print("--------------------")
raise err
return results
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION DISTINCT
(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with ch_client.ClickHouseClient() as cur:
query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
results = cur.execute(query=query, params=params)
return helper.list_to_camel_case(results)
return f
def __generic_autocomplete_metas(typename):
def f(project_id, text):
with ch_client.ClickHouseClient() as cur:
query = __generic_query(typename, value_length=len(text))
params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)}
results = cur.execute(query=query, params=params)
return results
return f
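A minimal usage sketch of the metas factory above (project id and search text are hypothetical):
user_os_autocomplete = __generic_autocomplete_metas(schemas.FilterType.user_os)
rows = user_os_autocomplete(project_id=1, text="Mac")  # up to 10 matching (value, type) rows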

View file

@ -1,13 +1,59 @@
import json
import schemas
from chalicelib.core import metrics
from chalicelib.core import metrics, metadata
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import ch_client, metrics_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
}.get(op, "=")
def _isAny_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def get(error_id, family=False):
if family:
return get_batch([error_id])
@ -263,10 +309,7 @@ def get_details(project_id, error_id, user_id, **data):
parent_error_id,session_id, user_anonymous_id,
user_id, user_uuid, user_browser, user_browser_version,
user_os, user_os_version, user_device, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE pe.error_id = fe.error_id
AND fe.user_id = %(userId)s), FALSE) AS favorite,
FALSE AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
@ -420,8 +463,10 @@ def get_details_chart(project_id, error_id, user_id, **data):
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate"):
endTime_arg_name="endDate", type_condition=True):
ch_sub_query = ["project_id =toUInt32(%(project_id)s)"]
if type_condition:
ch_sub_query.append("event_type='ERROR'")
if time_constraint:
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
@ -465,214 +510,217 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar
return ch_sub_query
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {'total': 0,
'errors': []
}
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
MAIN_EVENTS_TABLE = "final.events"
MAIN_SESSIONS_TABLE = "final.sessions"
if data.startDate >= TimeUTC.now(delta_days=-7):
MAIN_EVENTS_TABLE = "final.events_l7d_mv"
MAIN_SESSIONS_TABLE = "final.sessions_l7d_mv"
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
# To ignore Script error
pg_sub_query.append("pe.message!='Script error.'")
pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.startDate is None:
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
sessions,
last_occurrence,
first_occurrence,
chart
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"count": total}
if total == 0:
rows = []
else:
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
else:
r["status"] = "untracked"
r["parent_error_id"] = None
r["favorite"] = False
r["viewed"] = False
r["stack"] = None
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
or len(r["stack"]) > 0
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
# refactor this function after the ClickHouse structure changes (search by query is still missing)
def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
ch_sub_query = __get_basic_constraints(platform)
ch_sessions_sub_query = __get_basic_constraints(platform, type_condition=False)
ch_sub_query = __get_basic_constraints(platform, type_condition=True)
ch_sub_query.append("source ='js_exception'")
# To ignore Script error
ch_sub_query.append("message!='Script error.'")
statuses = []
error_ids = None
# Clickhouse keeps data for the past month only, so no need to search beyond that
if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
data.startDate = TimeUTC.now(-30)
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
subquery_part = ""
params = {}
if len(data.events) > 0:
errors_condition_count = 0
for i, e in enumerate(data.events):
if e.type == schemas.EventType.error:
errors_condition_count += 1
is_any = _isAny_operator(e.operator)
op = __get_sql_operator(e.operator)
e_k = f"e_value{i}"
params = {**params, **_multiple_values(e.value, value_key=e_k)}
if not is_any and e.value not in [None, "*", ""]:
ch_sub_query.append(
_multiple_conditions(f"(message {op} %({e_k})s OR name {op} %({e_k})s)",
e.value, value_key=e_k))
if len(data.events) > errors_condition_count:
print("----------Sessions conditions")
subquery_part_args, subquery_part = sessions.search_query_parts_ch(data=data, error_status=data.status,
errors_only=True,
project_id=project_id, user_id=user_id,
issue=None,
favorite_only=False)
subquery_part = f"INNER JOIN {subquery_part} USING(session_id)"
params = {**params, **subquery_part_args}
if len(data.filters) > 0:
meta_keys = None
# include a sub-query of sessions inside the events query, in order to reduce the selected data
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
params = {**params, f_k: f.value, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_operator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
if __is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_browser)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_os)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_device)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_country)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_source)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.utm_source)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_medium)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.utm_medium)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_campaign)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.utm_campaign)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
ch_sessions_sub_query.append("s.duration >= %(minDuration)s")
params["minDuration"] = f.value[0]
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
ch_sessions_sub_query.append("s.duration <= %(maxDuration)s")
params["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
referrer_constraint = 'isNotNull(s.base_referrer)'
else:
referrer_constraint = _multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value,
is_not=is_not, value_key=f_k)
elif filter_type == schemas.FilterType.metadata:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if f.source in meta_keys.keys():
if is_any:
ch_sessions_sub_query.append(f"isNotNull(s.{metadata.index_to_colname(meta_keys[f.source])})")
elif is_undefined:
ch_sessions_sub_query.append(f"isNull(s.{metadata.index_to_colname(meta_keys[f.source])})")
else:
ch_sessions_sub_query.append(
_multiple_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_id)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.user_id)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_anonymous_id)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.user_anonymous_id)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f"s.user_anonymous_id {op} toString(%({f_k})s)", f.value,
is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.rev_id)')
elif is_undefined:
ch_sessions_sub_query.append('isNull(s.rev_id)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
# op = __get_sql_operator(f.operator)
ch_sessions_sub_query.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
# elif filter_type == schemas.FilterType.issue:
# if is_any:
# ch_sessions_sub_query.append("notEmpty(s.issue_types)")
# else:
# ch_sessions_sub_query.append(f"hasAny(s.issue_types,%({f_k})s)")
# # _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
# # value_key=f_k))
#
# if is_not:
# extra_constraints[-1] = f"not({extra_constraints[-1]})"
# ss_constraints[-1] = f"not({ss_constraints[-1]})"
elif filter_type == schemas.FilterType.events_count:
ch_sessions_sub_query.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
with ch_client.ClickHouseClient() as ch:
step_size = __get_step_size(data.startDate, data.endDate, data.density)
sort = __get_sort_key('datetime')
if data.sort is not None:
@ -681,6 +729,7 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo
if data.order is not None:
order = data.order
params = {
**params,
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
@ -692,118 +741,82 @@ def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flo
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if data.bookmarked:
cur.execute(cur.mogrify(f"""SELECT error_id
FROM public.user_favorite_errors
WHERE user_id = %(userId)s
{"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
{"userId": user_id, "error_ids": tuple(error_ids or [])}))
error_ids = cur.fetchall()
if len(error_ids) == 0:
return empty_response
error_ids = [e["error_id"] for e in error_ids]
# if data.bookmarked:
# cur.execute(cur.mogrify(f"""SELECT error_id
# FROM public.user_favorite_errors
# WHERE user_id = %(userId)s
# {"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
# {"userId": user_id, "error_ids": tuple(error_ids or [])}))
# error_ids = cur.fetchall()
# if len(error_ids) == 0:
# return empty_response
# error_ids = [e["error_id"] for e in error_ids]
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
ch_sub_query.append("error_id IN %(error_ids)s")
main_ch_query = f"""\
SELECT COUNT(DISTINCT error_id) AS count
FROM errors
WHERE {" AND ".join(ch_sub_query)};"""
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
total = ch.execute(query=main_ch_query, params=params)[0]["count"]
if flows:
return {"data": {"count": total}}
if total == 0:
rows = []
else:
main_ch_query = f"""\
SELECT details.error_id AS error_id, name, message, users, sessions, last_occurrence, first_occurrence, chart
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(datetime) AS max_datetime,
MIN(datetime) AS min_datetime
FROM errors
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(DISTINCT session_id) AS count
FROM errors
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, timestamp
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
SELECT details.error_id AS error_id,
name, message, users, total, viewed,
sessions, last_occurrence, first_occurrence, chart
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_id) AS users,
COUNT(DISTINCT events.session_id) AS sessions,
MAX(datetime) AS max_datetime,
MIN(datetime) AS min_datetime,
COUNT(DISTINCT events.error_id) OVER() AS total,
any(isNotNull(viewed_error_id)) AS viewed
FROM {MAIN_EVENTS_TABLE} AS events
LEFT JOIN (SELECT error_id AS viewed_error_id
FROM final.user_viewed_errors
WHERE project_id=%(project_id)s
AND user_id=%(userId)s) AS viewed_errors ON(events.error_id=viewed_errors.viewed_error_id)
INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
FROM {MAIN_SESSIONS_TABLE} AS s
{subquery_part}
WHERE {" AND ".join(ch_sessions_sub_query)}) AS sessions
ON (events.session_id = sessions.session_id)
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id AS error_id,
toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence,
toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM {MAIN_EVENTS_TABLE}
WHERE project_id=%(project_id)s
AND event_type='ERROR'
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(DISTINCT session_id) AS count
FROM {MAIN_EVENTS_TABLE}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, timestamp
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
print("------------")
print(ch.format(main_ch_query, params))
print("------------")
rows = ch.execute(query=main_ch_query, params=params)
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"userId": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
rows = ch.execute(query=main_ch_query, params=params)
total = rows[0]["total"] if len(rows) > 0 else 0
for r in rows:
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
else:
r["status"] = "untracked"
r["parent_error_id"] = None
r["favorite"] = False
r["viewed"] = False
r["stack"] = None
r["chart"] = list(r["chart"])
for i in range(len(r["chart"])):
r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
r["chart"] = metrics.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
end_time=data.endDate,
density=data.density, neutral={"count": 0})
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
or len(r["stack"]) > 0
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
"data": {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
'total': total,
'errors': helper.list_to_camel_case(rows)
}
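For reference, the chart post-processing above reshapes ClickHouse groupArray pairs into dicts before gap-filling; a worked example with hypothetical values:
# groupArray output for one error: [[1661000000000, 3], [1661000600000, 1]]
# after the conversion loop:       [{"timestamp": 1661000000000, "count": 3},
#                                   {"timestamp": 1661000600000, "count": 1}]
# metrics.__complete_missing_steps then fills empty buckets with {"count": 0}.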

View file

@ -0,0 +1,39 @@
from chalicelib.utils import pg_client
from chalicelib.core import errors_viewed_exp
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
errors_viewed_exp.add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE ve.error_id = %(error_id)s
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
query=query
)
r = cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
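viewed_error is guard-then-insert: it writes only when the (user, error) pair is missing; a sketch with hypothetical ids (assumes the error row exists in public.errors):
viewed_error(project_id=1, user_id=7, error_id="e-123")  # first view: inserts in PG and ClickHouse
viewed_error(project_id=1, user_id=7, error_id="e-123")  # returns None: already marked viewed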

View file

@ -0,0 +1,15 @@
import logging
from decouple import config
from chalicelib.utils import ch_client, exp_ch_helper
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
def add_viewed_error(project_id, user_id, error_id):
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_errors_table()}(project_id,user_id, error_id)
VALUES (%(project_id)s,%(userId)s,%(error_id)s);"""
params = {"userId": user_id, "error_id": error_id, "project_id": project_id}
cur.execute(query=query, params=params)

View file

@ -167,9 +167,8 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""\
SELECT
toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(sessions.session_id) AS value
SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(DISTINCT sessions.session_id) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -191,7 +190,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
endTimestamp = startTimestamp
startTimestamp = endTimestamp - diff
ch_query = f""" SELECT COUNT(sessions.session_id) AS count
ch_query = f""" SELECT COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp,
@ -278,7 +277,7 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_query = f"""SELECT *
FROM (SELECT errors.error_id AS error_id,
errors.message AS error,
COUNT(errors.session_id) AS count,
COUNT(1) AS count,
COUNT(DISTINCT errors.session_id) AS sessions
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
@ -293,7 +292,7 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
print(f"got {len(rows)} rows")
# print(f"got {len(rows)} rows")
if len(rows) == 0:
return []
error_ids = [r["error_id"] for r in rows]
@ -302,7 +301,7 @@ def get_errors_trend(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
for error_id in error_ids:
ch_query = f"""\
SELECT toUnixTimestamp(toStartOfInterval(errors.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(errors.session_id) AS count
COUNT(1) AS count
FROM errors {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
@ -461,11 +460,11 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
COALESCE(avgOrNull(resources.duration),0) AS avg,
COUNT(resources.session_id) AS count
COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
@ -482,8 +481,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
GROUP BY url, timestamp
ORDER BY url, timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, "url": urls, **__get_constraint_values(args)}
params["url"] = urls
u_rows = ch.execute(query=ch_query, params=params)
for url in urls:
sub_rows = []
@ -783,27 +781,28 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query.append("resources.success = 0")
ch_sub_query.append("resources.type != 'fetch'")
ch_sub_query.append("resources.type = 'img'")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url_hostpath AS key,
COUNT(resources.session_id) AS doc_count
COUNT(1) AS doc_count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY url_hostpath
ORDER BY doc_count DESC
LIMIT 10;"""
rows = ch.execute(query=ch_query, params={"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
rows = [{"url": i["key"], "sessions": i["doc_count"]} for i in rows]
if len(rows) == 0:
return []
ch_sub_query.append("resources.url_hostpath = %(value)s")
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COUNT(resources.session_id) AS doc_count,
COUNT(1) AS doc_count,
toUnixTimestamp(MAX(resources.datetime))*1000 AS max_datatime
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
@ -813,13 +812,8 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
e["startedAt"] = startTimestamp
e["startTimestamp"] = startTimestamp
e["endTimestamp"] = endTimestamp
r = ch.execute(query=ch_query,
params={"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": e["url"],
**__get_constraint_values(args)})
params["value"] = e["url"]
r = ch.execute(query=ch_query, params=params)
e["endedAt"] = r[-1]["max_datatime"]
e["chart"] = [{"timestamp": i["timestamp"], "count": i["doc_count"]} for i in
@ -840,15 +834,16 @@ def get_network(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
resources.url_hostpath, COUNT(resources.session_id) AS doc_count
resources.url_hostpath, COUNT(1) AS doc_count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp, resources.url_hostpath
ORDER BY timestamp;"""
r = ch.execute(query=ch_query,
params={"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
ORDER BY timestamp, doc_count DESC
LIMIT 10 BY timestamp;"""
params = {"step_size": step_size, "project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
r = ch.execute(query=ch_query, params=params)
results = []
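
Note on the query change above: a bare LIMIT 10 would truncate the whole result set to ten rows, whereas ClickHouse's LIMIT n BY col keeps up to n rows per distinct value of col. A minimal sketch of the clause (the table and columns stand in for whichever query uses it):

    # Hedged sketch: "LIMIT 10 BY timestamp" returns up to ten rows
    # for every timestamp bucket, not ten rows overall.
    ch_query = """SELECT timestamp, url_hostpath, COUNT(1) AS doc_count
                  FROM resources
                  GROUP BY timestamp, url_hostpath
                  ORDER BY timestamp, doc_count DESC
                  LIMIT 10 BY timestamp;"""
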
@ -956,6 +951,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), type="all", density=19, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query.append("isNotNull(resources.url_hostpath)")
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
@ -1025,15 +1021,15 @@ def get_sessions_location(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT user_country, COUNT(session_id) AS count
ch_query = f"""SELECT user_country, COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY user_country
ORDER BY user_country;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return {"count": sum(i["count"] for i in rows), "chart": helper.list_to_camel_case(rows)}
@ -1108,30 +1104,24 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT pages.response_time AS response_time,
COUNT(pages.session_id) AS count
COUNT(1) AS count
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY response_time
ORDER BY response_time;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT COALESCE(avgOrNull(pages.response_time),0) AS avg
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})[0]["avg"]
avg = ch.execute(query=ch_query, params=params)[0]["avg"]
quantiles_keys = [50, 90, 95, 99]
ch_query = f"""SELECT quantilesExact({",".join([str(i / 100) for i in quantiles_keys])})(pages.response_time) AS values
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
quantiles = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
quantiles = ch.execute(query=ch_query, params=params)
result = {
"value": avg,
"total": sum(r["count"] for r in rows),
@ -1228,15 +1218,15 @@ def get_busiest_time_of_day(project_id, startTimestamp=TimeUTC.now(delta_days=-1
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT intDiv(toHour(sessions.datetime),2)*2 AS hour,
COUNT(sessions.session_id) AS count
COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY hour
ORDER BY hour ASC;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return __complete_missing_steps(rows=rows, start_time=0, end_time=24000, density=12,
neutral={"count": 0},
time_key="hour", time_coefficient=1)
@ -1251,17 +1241,24 @@ def get_top_metrics(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
if value is not None:
ch_sub_query.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT (SELECT COALESCE(avgOrNull(pages.response_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.response_time) AND pages.response_time>0) AS avg_response_time,
(SELECT COUNT(pages.session_id) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)}) AS count_requests,
(SELECT COALESCE(avgOrNull(pages.first_paint),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.first_paint) AND pages.first_paint>0) AS avg_first_paint,
(SELECT COALESCE(avgOrNull(pages.dom_content_loaded_event_time),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0) AS avg_dom_content_loaded,
(SELECT COALESCE(avgOrNull(pages.ttfb),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.ttfb) AND pages.ttfb>0) AS avg_till_first_bit,
(SELECT COALESCE(avgOrNull(pages.time_to_interactive),0) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0) AS avg_time_to_interactive;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)})
ch_query = f"""SELECT COALESCE(avgOrNull(if(pages.response_time>0,pages.response_time,null)),0) AS avg_response_time,
COALESCE(avgOrNull(if(pages.first_paint>0,pages.first_paint,null)),0) AS avg_first_paint,
COALESCE(avgOrNull(if(pages.dom_content_loaded_event_time>0,pages.dom_content_loaded_event_time,null)),0) AS avg_dom_content_loaded,
COALESCE(avgOrNull(if(pages.ttfb>0,pages.ttfb,null)),0) AS avg_till_first_bit,
COALESCE(avgOrNull(if(pages.time_to_interactive>0,pages.time_to_interactive,null)),0) AS avg_time_to_interactive,
(SELECT COUNT(1) FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)}) AS count_requests
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
AND (isNotNull(pages.response_time) AND pages.response_time>0 OR
isNotNull(pages.first_paint) AND pages.first_paint>0 OR
isNotNull(pages.dom_content_loaded_event_time) AND pages.dom_content_loaded_event_time>0 OR
isNotNull(pages.ttfb) AND pages.ttfb>0 OR
isNotNull(pages.time_to_interactive) AND pages.time_to_interactive >0);"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"value": value, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.dict_to_camel_case(rows[0])
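
The rewritten get_top_metrics replaces five scalar subqueries with one scan: avgOrNull(if(metric > 0, metric, null)) averages only the positive values of each column, because if() maps everything else to NULL and avgOrNull skips NULLs; COALESCE then turns an all-NULL average into 0. A one-column sketch of the idiom:

    # Hedged sketch: conditional average of a single column in one pass.
    ch_query = """SELECT COALESCE(avgOrNull(if(pages.response_time > 0,
                                               pages.response_time, NULL)), 0) AS avg_response_time
                  FROM pages
                  WHERE project_id = %(project_id)s;"""
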
@ -1461,17 +1458,17 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(sessions.session_id) AS value
COUNT(1) AS value
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query,
params={"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"session_ids": session_ids, **__get_constraint_values(args)})
params = {"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"session_ids": session_ids, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
ch_query = f"""SELECT b.user_browser AS browser,
sum(bv.count) AS total,
groupArray([bv.user_browser_version, toString(bv.count)]) AS versions
@ -1480,14 +1477,14 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY sessions.user_browser
ORDER BY COUNT(sessions.session_id) DESC
ORDER BY COUNT(1) DESC
LIMIT 3
) AS b
INNER JOIN
(
SELECT sessions.user_browser,
sessions.user_browser_version,
COUNT(sessions.session_id) AS count
COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY sessions.user_browser,
@ -1496,12 +1493,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
) AS bv USING (user_browser)
GROUP BY b.user_browser
ORDER BY b.user_browser;"""
browsers = ch.execute(query=ch_query,
params={"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"session_ids": session_ids, **__get_constraint_values(args)})
browsers = ch.execute(query=ch_query, params=params)
total = sum(r["total"] for r in browsers)
for r in browsers:
r["percentage"] = r["total"] / (total / 100)
@ -1546,12 +1538,12 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_query = f"""SELECT timestamp,
groupArray([domain, toString(count)]) AS keys
FROM (SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
resources.url_host AS domain, COUNT(resources.session_id) AS count
resources.url_host AS domain, COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY timestamp,resources.url_host
ORDER BY timestamp, count DESC
LIMIT 5) AS domain_stats
LIMIT 5 BY timestamp) AS domain_stats
GROUP BY timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
@ -1577,8 +1569,8 @@ def get_domains_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
return result
def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", round_start=True, data=args)
ch_sub_query.append("intDiv(resources.status, 100) == %(status_code)s")
@ -1589,18 +1581,18 @@ def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
ch_query = f"""SELECT timestamp,
groupArray([domain, toString(count)]) AS keys
FROM (SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
resources.url_host AS domain, COUNT(resources.session_id) AS count
resources.url_host AS domain, COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY timestamp,resources.url_host
ORDER BY timestamp, count DESC
LIMIT 5) AS domain_stats
LIMIT 5 BY timestamp) AS domain_stats
GROUP BY timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 4, **__get_constraint_values(args)}
"status_code": status, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
rows = __nested_array_to_dict_array(rows)
neutral = __get_domains_errors_neutral(rows)
@ -1611,38 +1603,16 @@ def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
density=density, neutral=neutral)
def get_domains_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
return __get_domains_errors_4xx_and_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp, density=density, **args)
def get_domains_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(), density=6, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", round_start=True, data=args)
ch_sub_query.append("intDiv(resources.status, 100) == %(status_code)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT timestamp,
groupArray([domain, toString(count)]) AS keys
FROM (SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
resources.url_host AS domain, COUNT(resources.session_id) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY timestamp,resources.url_host
ORDER BY timestamp, count DESC
LIMIT 5) AS domain_stats
GROUP BY timestamp;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp,
"step_size": step_size,
"status_code": 5, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
rows = __nested_array_to_dict_array(rows)
neutral = __get_domains_errors_neutral(rows)
rows = __merge_rows_with_neutral(rows, neutral)
return __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral=neutral)
return __get_domains_errors_4xx_and_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp, density=density, **args)
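
get_domains_errors_4xx and get_domains_errors_5xx previously duplicated the entire query body; both are now thin wrappers around one private helper that binds intDiv(resources.status, 100) == %(status_code)s to 4 or 5. The calls-errors pair below gets the same treatment. The shape of the refactor, sketched with hypothetical names:

    # Hedged sketch of the parameterize-then-wrap refactor.
    def _by_status_class(status, rows):
        # keep rows whose HTTP status falls in the given class (4 -> 4xx, 5 -> 5xx)
        return [r for r in rows if r["status"] // 100 == status]

    def errors_4xx(rows):
        return _by_status_class(4, rows)

    def errors_5xx(rows):
        return _by_status_class(5, rows)
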
def __nested_array_to_dict_array(rows):
@ -1690,16 +1660,16 @@ def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT
resources.url_host AS domain,
COUNT(resources.session_id) AS errors_count
COUNT(1) AS errors_count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.url_host
ORDER BY errors_count DESC
LIMIT 5;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.list_to_camel_case(rows)
@ -1716,7 +1686,7 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM
(
SELECT sessions.user_browser,
COUNT(sessions.session_id) AS count
COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY sessions.user_browser
@ -1727,7 +1697,7 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
(
SELECT sessions.user_browser,
sessions.user_browser_version,
COUNT(sessions.session_id) AS count
COUNT(1) AS count
FROM sessions {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY
@ -1739,10 +1709,10 @@ def get_sessions_per_browser(project_id, startTimestamp=TimeUTC.now(delta_days=-
GROUP BY
b.user_browser, b.count
ORDER BY b.count DESC;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
for i, r in enumerate(rows):
versions = {}
for j in range(len(r["versions"])):
@ -1763,67 +1733,58 @@ def get_calls_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endT
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests,
COUNT(1) AS all_requests,
SUM(if(intDiv(resources.status, 100) == 4, 1, 0)) AS _4xx,
SUM(if(intDiv(resources.status, 100) == 5, 1, 0)) AS _5xx
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY (_4xx + _5xx), all_requests DESC
ORDER BY (_4xx + _5xx) DESC, all_requests DESC
LIMIT 50;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.list_to_camel_case(rows)
def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
endTimestamp=TimeUTC.now(),
platform=None, **args):
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query.append("resources.type = 'fetch'")
ch_sub_query.append(f"intDiv(resources.status, 100) == {status}")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(1) AS all_requests
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY all_requests DESC
LIMIT 10;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.list_to_camel_case(rows)
def get_calls_errors_4xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query.append("resources.type = 'fetch'")
ch_sub_query.append("intDiv(resources.status, 100) == 4")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY all_requests DESC
LIMIT 10;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
return helper.list_to_camel_case(rows)
return __get_calls_errors_4xx_or_5xx(status=4, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_calls_errors_5xx(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
platform=None, **args):
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query.append("resources.type = 'fetch'")
ch_sub_query.append("intDiv(resources.status, 100) == 5")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.method,
resources.url_hostpath,
COUNT(resources.session_id) AS all_requests
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.method, resources.url_hostpath
ORDER BY all_requests DESC
LIMIT 10;"""
rows = ch.execute(query=ch_query,
params={"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
return helper.list_to_camel_case(rows)
return __get_calls_errors_4xx_or_5xx(status=5, project_id=project_id, startTimestamp=startTimestamp,
endTimestamp=endTimestamp,
platform=platform, **args)
def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
@ -1866,15 +1827,11 @@ def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), e
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = helper.list_to_camel_case(ch.execute(query=ch_query, params=params))
for r in rows:
print(r)
return __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
neutral={"4xx": 0,
"5xx": 0,
"js": 0,
"integrations": 0})
neutral={"4xx": 0, "5xx": 0, "js": 0, "integrations": 0})
def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1894,7 +1851,7 @@ def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(delta_d
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
COUNT(resources.session_id) AS total,
COUNT(1) AS total,
SUM(if(resources.type='fetch',1,0)) AS xhr
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
@ -1962,10 +1919,8 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
endTimestamp=TimeUTC.now(), density=7, **args):
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", data=args)
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(s.base_datetime, toIntervalSecond(%(step_size)s))) * 1000 AS timestamp,
@ -1974,27 +1929,27 @@ def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.now(de
FROM
( SELECT resources.session_id,
MIN(resources.datetime) AS base_datetime,
COUNT(resources.url) AS count
COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.session_id
) AS s
INNER JOIN
(SELECT session_id,
type,
COALESCE(avgOrNull(NULLIF(count,0)),0) AS xavg
FROM (SELECT resources.session_id, resources.type, COUNT(resources.url) AS count
FROM (SELECT resources.session_id, resources.type, COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY resources.session_id, resources.type) AS ss
GROUP BY ss.session_id, ss.type) AS t USING (session_id)
GROUP BY timestamp
ORDER BY timestamp ASC;"""
rows = ch.execute(query=ch_query,
params={"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
for r in rows:
types = {}
for i in range(len(r["types"])):
@ -2030,17 +1985,17 @@ def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_day
groupArray([toString(t.type), toString(t.count)]) AS types
FROM(SELECT toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
resources.type,
COUNT(resources.session_id) AS count
COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp,resources.type
ORDER BY timestamp) AS t
GROUP BY timestamp;"""
rows = ch.execute(query=ch_query,
params={"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
for r in rows:
for t in r["types"]:
r[t[0]] = t[1]
@ -2056,6 +2011,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
step_size = __get_step_size(startTimestamp, endTimestamp, density)
ch_sub_query = __get_basic_constraints(table_name="resources", round_start=True, data=args)
ch_sub_query.append("resources.success = 0")
ch_sub_query.append("resources.type IN ('fetch','script')")
sch_sub_query = ["rs.project_id =toUInt32(%(project_id)s)", "rs.type IN ('fetch','script')"]
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
@ -2063,8 +2019,8 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(sub_resources.datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
SUM(if(first.url_host = sub_resources.url_host, 1, 0)) AS first_party,
SUM(if(first.url_host = sub_resources.url_host, 0, 1)) AS third_party
SUM(first.url_host = sub_resources.url_host) AS first_party,
SUM(first.url_host != sub_resources.url_host) AS third_party
FROM
(
SELECT resources.datetime, resources.url_host
@ -2075,7 +2031,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
(
SELECT
rs.url_host,
COUNT(rs.session_id) AS count
COUNT(1) AS count
FROM resources AS rs
WHERE {" AND ".join(sch_sub_query)}
GROUP BY rs.url_host
@ -2084,11 +2040,11 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
) AS first
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query,
params={"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)})
params = {"step_size": step_size,
"project_id": project_id,
"startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
return helper.list_to_camel_case(__complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density,
@ -2476,7 +2432,7 @@ def __get_user_activity_avg_visited_pages(ch, project_id, startTimestamp, endTim
ch_sub_query += meta_condition
ch_query = f"""SELECT COALESCE(CEIL(avgOrNull(count)),0) AS value
FROM (SELECT COUNT(session_id) AS count
FROM (SELECT COUNT(1) AS count
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)}
GROUP BY session_id) AS groupped_data
@ -2496,10 +2452,10 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
ch_sub_query_chart += meta_condition
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
ch_query = f"""SELECT timestamp, COALESCE(avgOrNull(count), 0) AS value
FROM (SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
session_id, COUNT(pages.session_id) AS count
session_id, COUNT(1) AS count
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp,session_id
@ -2507,7 +2463,7 @@ def __get_user_activity_avg_visited_pages_chart(ch, project_id, startTimestamp,
WHERE count>0
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = ch.execute(query=ch_query, params=params)
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
density=density, neutral={"value": 0})
@ -2604,11 +2560,11 @@ def get_top_metrics_avg_response_time(project_id, startTimestamp=TimeUTC.now(del
rows = ch.execute(query=ch_query, params=params)
results = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COUNT(pages.response_time) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.response_time) AND pages.response_time>0
GROUP BY timestamp
ORDER BY timestamp;"""
COALESCE(avgOrNull(pages.response_time),0) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND isNotNull(pages.response_time) AND pages.response_time>0
GROUP BY timestamp
ORDER BY timestamp;"""
rows = ch.execute(query=ch_query, params={**params, **__get_constraint_values(args)})
rows = __complete_missing_steps(rows=rows, start_time=startTimestamp,
end_time=endTimestamp,
@ -2631,7 +2587,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
ch_sub_query.append("pages.url_path = %(value)s")
ch_sub_query_chart.append("pages.url_path = %(value)s")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT COUNT(pages.session_id) AS value
ch_query = f"""SELECT COUNT(1) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)};"""
params = {"step_size": step_size, "project_id": project_id,
@ -2641,7 +2597,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
rows = ch.execute(query=ch_query, params=params)
result = rows[0]
ch_query = f"""SELECT toUnixTimestamp(toStartOfInterval(pages.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COUNT(pages.session_id) AS value
COUNT(1) AS value
FROM pages {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)}
GROUP BY timestamp

File diff suppressed because it is too large.


@ -52,29 +52,57 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
) AS role_project ON (TRUE)"""
recorded_q = ""
extra_projection = ""
extra_join = ""
if gdpr:
extra_projection += ',s.gdpr'
if recorded:
recorded_q = """, COALESCE((SELECT TRUE
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM s.created_at) * 1000 - 24 * 60 * 60 * 1000)
AND sessions.start_ts <= %(now)s
LIMIT 1), FALSE) AS recorded"""
query = cur.mogrify(f"""\
SELECT
s.project_id, s.name, s.project_key, s.save_request_payloads
{',s.gdpr' if gdpr else ''}
{recorded_q}
{',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
FROM public.projects AS s
{'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
{role_query if user_id is not None else ""}
WHERE s.tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
ORDER BY s.project_id;""",
extra_projection += """,COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT ,
(SELECT MIN(sessions.start_ts)
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM
COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-24*60*60*1000)
AND sessions.start_ts <= %(now)s
LIMIT 1), NULL) AS first_recorded"""
if stack_integrations:
    extra_projection += ',stack_integrations.count>0 AS stack_integrations'
    extra_join = """LEFT JOIN LATERAL (SELECT COUNT(*) AS count
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at
{extra_projection}
FROM public.projects AS s
{extra_join}
{role_query if user_id is not None else ""}
WHERE s.tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
ORDER BY s.project_id {") AS raw" if recorded else ""};""",
{"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now()})
cur.execute(query)
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
if recorded:
for r in rows:
if r["first_recorded_session_at"] is None:
extra_update = ""
if r["recorded"]:
extra_update = ", first_recorded_session_at=to_timestamp(%(first_recorded)s/1000)"
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc')
{extra_update}
WHERE project_id=%(project_id)s""",
{"project_id": r["project_id"], "first_recorded": r["first_recorded"]})
cur.execute(query)
r.pop("first_recorded_session_at")
r.pop("first_recorded")
if recording_state:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last

File diff suppressed because it is too large.


@ -1,18 +1,19 @@
from chalicelib.core import sessions
from chalicelib.utils import pg_client, s3_extra
from decouple import config
from chalicelib.core import sessions, sessions_favorite_exp
from chalicelib.utils import pg_client, s3_extra
def add_favorite_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
INSERT INTO public.user_favorite_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s);""",
INSERT INTO public.user_favorite_sessions(user_id, session_id)
VALUES (%(userId)s,%(sessionId)s);""",
{"userId": user_id, "sessionId": session_id})
)
sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
include_fav_viewed=True)
@ -22,28 +23,15 @@ def remove_favorite_session(project_id, user_id, session_id):
cur.execute(
cur.mogrify(f"""\
DELETE FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
WHERE user_id = %(userId)s
AND session_id = %(sessionId)s;""",
{"userId": user_id, "sessionId": session_id})
)
sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
include_fav_viewed=True)
def add_viewed_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.user_viewed_sessions
(user_id, session_id)
VALUES
(%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
def favorite_session(project_id, user_id, session_id):
if favorite_session_exists(user_id=user_id, session_id=session_id):
key = str(session_id)
@ -74,16 +62,11 @@ def favorite_session(project_id, user_id, session_id):
return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
def view_session(project_id, user_id, session_id):
return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id)
def favorite_session_exists(user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT
session_id
"""SELECT session_id
FROM public.user_favorite_sessions
WHERE
user_id = %(userId)s
@ -92,3 +75,18 @@ def favorite_session_exists(user_id, session_id):
)
r = cur.fetchone()
return r is not None
def get_start_end_timestamp(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT max(start_ts) AS max_start_ts, min(start_ts) AS min_start_ts
FROM public.user_favorite_sessions INNER JOIN sessions USING(session_id)
WHERE
user_favorite_sessions.user_id = %(userId)s
AND project_id = %(project_id)s;""",
{"userId": user_id, "project_id": project_id})
)
r = cur.fetchone()
        return (0, 0) if r is None else (r["max_start_ts"], r["min_start_ts"])


@ -0,0 +1,24 @@
import logging
from decouple import config
from chalicelib.utils import ch_client, exp_ch_helper
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
def add_favorite_session(project_id, user_id, session_id, sign=1):
try:
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
cur.execute(query=query, params=params)
except Exception as err:
logging.error("------- Exception while adding favorite session to CH")
logging.error(err)
def remove_favorite_session(project_id, user_id, session_id):
add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, sign=-1)


@ -0,0 +1,13 @@
from chalicelib.core import sessions_viewed_exp
from chalicelib.utils import pg_client
def view_session(project_id, user_id, session_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_sessions (user_id, session_id)
VALUES (%(userId)s,%(sessionId)s)
ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
sessions_viewed_exp.view_session(project_id=project_id, user_id=user_id, session_id=session_id)


@ -0,0 +1,17 @@
from chalicelib.utils import ch_client, exp_ch_helper
import logging
from decouple import config
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
def view_session(project_id, user_id, session_id):
try:
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_sessions_table()}(project_id, user_id, session_id)
VALUES (%(project_id)s,%(userId)s,%(sessionId)s);"""
params = {"userId": user_id, "sessionId": session_id, "project_id": project_id}
cur.execute(query=query, params=params)
except Exception as err:
logging.error("------- Exception while adding viewed session to CH")
logging.error(err)


@ -43,7 +43,7 @@ def create_step1(data: schemas.UserSignupSchema):
print("Verifying company's name validity")
company_name = data.organizationName
if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name):
if company_name is None or len(company_name) < 1:
errors.append("invalid organization's name")
print("Verifying project's name validity")


@ -212,7 +212,7 @@ def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
if user:
return {"errors": ["user already exists"]}
name = data.get("name", None)
if name is not None and not helper.is_alphabet_latin_space(name):
if name is not None and len(name) == 0:
return {"errors": ["invalid user name"]}
if name is None:
name = data["email"]


@ -1,6 +1,19 @@
import logging
import clickhouse_driver
from decouple import config
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
if config('ch_receive_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
class ClickHouseClient:
__client = None
@ -8,16 +21,23 @@ class ClickHouseClient:
def __init__(self):
self.__client = clickhouse_driver.Client(host=config("ch_host"),
database="default",
port=config("ch_port", cast=int)) \
port=config("ch_port", cast=int),
settings=settings) \
if self.__client is None else self.__client
def __enter__(self):
return self
def execute(self, query, params=None, **args):
results = self.__client.execute(query=query, params=params, with_column_types=True, **args)
keys = tuple(x for x, y in results[1])
return [dict(zip(keys, i)) for i in results[0]]
try:
results = self.__client.execute(query=query, params=params, with_column_types=True, **args)
keys = tuple(x for x, y in results[1])
return [dict(zip(keys, i)) for i in results[0]]
except Exception as err:
logging.error("--------- CH QUERY EXCEPTION -----------")
logging.error(self.format(query=query, params=params))
logging.error("--------------------")
raise err
def insert(self, query, params=None, **args):
return self.__client.execute(query=query, params=params, **args)


@ -0,0 +1,42 @@
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
import logging
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
if config("EXP_7D_MV", cast=bool, default=True):
print(">>> Using experimental last 7 days materialized views")
def get_main_events_table(timestamp):
return "experimental.events_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events"
def get_main_sessions_table(timestamp):
return "experimental.sessions_l7d_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
def get_main_resources_table(timestamp):
return "experimental.resources_l7s_mv" \
if config("EXP_7D_MV", cast=bool, default=True) \
and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.resources"
def get_autocomplete_table(timestamp=0):
return "experimental.autocomplete"
def get_user_favorite_sessions_table(timestamp=0):
return "experimental.user_favorite_sessions"
def get_user_viewed_sessions_table(timestamp=0):
return "experimental.user_viewed_sessions"
def get_user_viewed_errors_table(timestamp=0):
return "experimental.user_viewed_errors"


@ -3,8 +3,10 @@
rm -rf ./chalicelib/core/alerts.py
rm -rf ./chalicelib/core/alerts_processor.py
rm -rf ./chalicelib/core/announcements.py
rm -rf ./chalicelib/core/autocomplete.py
rm -rf ./chalicelib/core/collaboration_slack.py
rm -rf ./chalicelib/core/errors_favorite_viewed.py
rm -rf ./chalicelib/core/errors.py
rm -rf ./chalicelib/core/errors_favorite.py
rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_ios.py
rm -rf ./chalicelib/core/dashboards.py


@ -21,6 +21,8 @@ captcha_key=
captcha_server=
ch_host=
ch_port=
ch_timeout=30
ch_receive_timeout=10
change_password_link=/reset-password?invitation=%s&&pass=%s
email_basic=http://127.0.0.1:8000/async/basic/%s
email_plans=http://127.0.0.1:8000/async/plans/%s
@ -58,4 +60,9 @@ sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
stage=default-ee
version_number=1.0.0
FS_DIR=/mnt/efs
FS_DIR=/mnt/efs
EXP_SESSIONS_SEARCH=true
EXP_AUTOCOMPLETE=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_7D_MV=true


@ -1,17 +1,17 @@
requests==2.28.1
urllib3==1.26.10
boto3==1.24.26
boto3==1.24.53
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.3.1
elasticsearch==8.3.3
jira==3.3.1
fastapi==0.78.0
fastapi==0.80.0
uvicorn[standard]==0.18.2
python-decouple==3.6
pydantic[email]==1.9.1
pydantic[email]==1.9.2
apscheduler==3.9.1
clickhouse-driver==0.2.4


@ -1,17 +1,17 @@
requests==2.28.1
urllib3==1.26.10
boto3==1.24.26
boto3==1.24.53
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.3.1
elasticsearch==8.3.3
jira==3.3.1
fastapi==0.78.0
fastapi==0.80.0
uvicorn[standard]==0.18.2
python-decouple==3.6
pydantic[email]==1.9.1
pydantic[email]==1.9.2
apscheduler==3.9.1
clickhouse-driver==0.2.4


@ -1,17 +1,17 @@
requests==2.28.1
urllib3==1.26.10
boto3==1.24.26
boto3==1.24.53
pyjwt==2.4.0
psycopg2-binary==2.9.3
elasticsearch==8.3.1
elasticsearch==8.3.3
jira==3.3.1
fastapi==0.78.0
fastapi==0.80.0
uvicorn[standard]==0.18.2
python-decouple==3.6
pydantic[email]==1.9.1
pydantic[email]==1.9.2
apscheduler==3.9.1
clickhouse-driver==0.2.4


@ -43,3 +43,27 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
class Config:
alias_generator = schemas.attribute_to_camel_case
class SessionModel(BaseModel):
viewed: bool = Field(default=False)
userId: Optional[str]
userOs: str
duration: int
favorite: bool = Field(default=False)
platform: str
startTs: int
userUuid: str
projectId: int
sessionId: str
issueScore: int
issueTypes: List[schemas.IssueType] = Field(default=[])
pagesCount: int
userDevice: Optional[str]
errorsCount: int
eventsCount: int
userBrowser: str
userCountry: str
userDeviceType: str
userAnonymousId: Optional[str]
metadata: dict = Field(default={})


@ -0,0 +1 @@
CREATE DATABASE IF NOT EXISTS experimental;


@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.autocomplete
(
project_id UInt16,
type LowCardinality(String),
value String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
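
ReplacingMergeTree(_timestamp) deduplicates rows that share the ORDER BY key (project_id, type, value) during background merges, keeping the newest _timestamp; until a merge runs, duplicates can still surface, so reads that must be exact can pay for FINAL. A hedged sketch:

    # Sketch: FINAL forces deduplication at read time (at extra cost);
    # the autocomplete queries tolerate duplicates and skip it.
    ch_query = """SELECT type, value
                  FROM experimental.autocomplete FINAL
                  WHERE project_id = %(project_id)s
                    AND value ILIKE %(svalue)s
                  LIMIT 5;"""
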


@ -0,0 +1,72 @@
CREATE TABLE IF NOT EXISTS experimental.events
(
session_id UInt64,
project_id UInt16,
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'LONGTASK'=5,'ERROR'=6,'CUSTOM'=7, 'GRAPHQL'=8, 'STATEACTION'=9),
datetime DateTime,
label Nullable(String),
hesitation_time Nullable(UInt32),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
message Nullable(String),
error_id Nullable(String),
duration Nullable(UInt16),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)),
container_id Nullable(String),
container_name Nullable(String),
container_src Nullable(String),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String) MATERIALIZED lower(pathFull(url)),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
dom_content_loaded_event_start Nullable(UInt16),
dom_content_loaded_event_end Nullable(UInt16),
load_event_start Nullable(UInt16),
load_event_end Nullable(UInt16),
first_paint Nullable(UInt16),
first_contentful_paint_time Nullable(UInt16),
speed_index Nullable(UInt16),
visually_complete Nullable(UInt16),
time_to_interactive Nullable(UInt16),
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
minus(response_start, request_start), Null),
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
minus(response_end, request_start), Null),
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
minus(response_end, response_start), Null),
dom_building_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_start, response_end),
minus(dom_content_loaded_event_start, response_end), Null),
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
minus(load_event_end, load_event_start), Null),
min_fps Nullable(UInt8),
avg_fps Nullable(UInt8),
max_fps Nullable(UInt8),
min_cpu Nullable(UInt8),
avg_cpu Nullable(UInt8),
max_cpu Nullable(UInt8),
min_total_js_heap_size Nullable(UInt64),
avg_total_js_heap_size Nullable(UInt64),
max_total_js_heap_size Nullable(UInt64),
min_used_js_heap_size Nullable(UInt64),
avg_used_js_heap_size Nullable(UInt64),
max_used_js_heap_size Nullable(UInt64),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id)
TTL datetime + INTERVAL 3 MONTH;
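
The derived timings (ttfb, ttlb, response_time, dom_building_time, dom_content_loaded_event_time, load_event_time) are MATERIALIZED: ClickHouse computes them from the raw columns at insert time and yields NULL whenever the delta would be negative, so writers never supply them. A hedged sketch of an insert (the column subset and values are illustrative):

    # Sketch: only raw timings are inserted; ttfb and response_time
    # are derived by ClickHouse from request_start/response_start/response_end.
    from datetime import datetime
    from chalicelib.utils import ch_client

    with ch_client.ClickHouseClient() as ch:
        ch.insert(query="""INSERT INTO experimental.events
                           (session_id, project_id, event_type, datetime, url,
                            request_start, response_start, response_end) VALUES""",
                  params=[(1, 1, 'LOCATION', datetime.utcnow(), 'https://example.com/', 10, 50, 120)])
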


@ -0,0 +1,25 @@
CREATE TABLE IF NOT EXISTS experimental.resources
(
session_id UInt64,
project_id UInt16,
datetime DateTime,
url String,
url_host String MATERIALIZED lower(domain(url)),
url_path String MATERIALIZED lower(path(url)),
url_hostpath String MATERIALIZED concat(url_host, url_path),
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
nullIf(splitByChar('/', url_path)[-2], ''))),
duration Nullable(UInt16),
ttfb Nullable(UInt16),
header_size Nullable(UInt16),
encoded_body_size Nullable(UInt32),
decoded_body_size Nullable(UInt32),
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
success Nullable(UInt8) COMMENT 'currently available for type=img only',
_timestamp DateTime DEFAULT now()
) ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, type, session_id)
TTL datetime + INTERVAL 3 MONTH;


@ -0,0 +1,42 @@
CREATE TABLE IF NOT EXISTS experimental.sessions
(
session_id UInt64,
project_id UInt16,
tracker_version LowCardinality(String),
rev_id LowCardinality(Nullable(String)),
user_uuid UUID,
user_os LowCardinality(String),
user_os_version LowCardinality(Nullable(String)),
user_browser LowCardinality(String),
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String),
user_id Nullable(String),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
issue_types Array(LowCardinality(String)),
referrer Nullable(String),
base_referrer Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 3 MONTH
SETTINGS index_granularity = 512;
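
ReplacingMergeTree(_timestamp) deduplicates rows that share the ORDER BY key (project_id, datetime, session_id) only at merge time, keeping the row with the highest _timestamp, so exact reads need FINAL. A sketch with a placeholder project_id:

SELECT session_id, user_id, duration
FROM experimental.sessions FINAL
WHERE project_id = 1 -- placeholder
  AND datetime >= now() - INTERVAL 1 DAY
ORDER BY datetime DESC
LIMIT 10;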

View file

@ -0,0 +1,11 @@
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now(),
sign Int8
) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
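
With CollapsingMergeTree, removing a favorite means inserting a cancelling row with the opposite sign; matching +1/-1 pairs collapse during merges. A sketch with placeholder ids:

INSERT INTO experimental.user_favorite_sessions (project_id, user_id, session_id, sign)
VALUES (1, 42, 123456789, 1);  -- mark as favorite
INSERT INTO experimental.user_favorite_sessions (project_id, user_id, session_id, sign)
VALUES (1, 42, 123456789, -1); -- cancel the row above
SELECT session_id
FROM experimental.user_favorite_sessions
WHERE project_id = 1 AND user_id = 42
GROUP BY session_id
HAVING sum(sign) > 0; -- still-favorited sessions, whatever the merge state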

View file

@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
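
Here ReplacingMergeTree makes the viewed-mark idempotent: repeated inserts of the same (project_id, user_id, session_id) collapse into a single row on merge. Placeholder ids:

INSERT INTO experimental.user_viewed_sessions (project_id, user_id, session_id)
VALUES (1, 42, 123456789); -- safe to repeat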

View file

@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
project_id UInt16,
user_id UInt32,
error_id String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;

View file

@ -0,0 +1,10 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT *
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;
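
POPULATE backfills the view from existing rows at creation time, later inserts into experimental.events flow in automatically, and the 7-day TTL keeps it a small hot copy for dashboard-style queries. A sketch with a placeholder project_id:

SELECT event_type, count() AS events
FROM experimental.events_l7d_mv
WHERE project_id = 1 -- placeholder
GROUP BY event_type
ORDER BY events DESC;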

View file

@ -0,0 +1,10 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT *
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -0,0 +1,13 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
SETTINGS index_granularity = 512
POPULATE
AS
SELECT *
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)
AND duration > 0;

View file

@ -0,0 +1 @@
CREATE DATABASE IF NOT EXISTS experimental;

View file

@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.autocomplete
(
project_id UInt16,
type LowCardinality(String),
value String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
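
A typeahead lookup sketch against this table; the type label and search pattern are placeholders, since actual type values come from the ingestion side:

SELECT DISTINCT type, value
FROM experimental.autocomplete
WHERE project_id = 1 -- placeholder
  AND type = 'USERID' -- assumed type label
  AND value ILIKE '%john%'
LIMIT 5;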

View file

@ -0,0 +1,72 @@
CREATE TABLE IF NOT EXISTS experimental.events
(
session_id UInt64,
project_id UInt16,
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'LONGTASK'=5,'ERROR'=6,'CUSTOM'=7, 'GRAPHQL'=8, 'STATEACTION'=9),
datetime DateTime,
label Nullable(String),
hesitation_time Nullable(UInt32),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
message Nullable(String),
error_id Nullable(String),
duration Nullable(UInt16),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
container_type Nullable(Enum8('window'=0, 'iframe'=1, 'embed'=2, 'object'=3)),
container_id Nullable(String),
container_name Nullable(String),
container_src Nullable(String),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String) MATERIALIZED lower(pathFull(url)),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
dom_content_loaded_event_start Nullable(UInt16),
dom_content_loaded_event_end Nullable(UInt16),
load_event_start Nullable(UInt16),
load_event_end Nullable(UInt16),
first_paint Nullable(UInt16),
first_contentful_paint_time Nullable(UInt16),
speed_index Nullable(UInt16),
visually_complete Nullable(UInt16),
time_to_interactive Nullable(UInt16),
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
minus(response_start, request_start), Null),
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
minus(response_end, request_start), Null),
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
minus(response_end, response_start), Null),
dom_building_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_start, response_end),
minus(dom_content_loaded_event_start, response_end), Null),
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
minus(load_event_end, load_event_start), Null),
min_fps Nullable(UInt8),
avg_fps Nullable(UInt8),
max_fps Nullable(UInt8),
min_cpu Nullable(UInt8),
avg_cpu Nullable(UInt8),
max_cpu Nullable(UInt8),
min_total_js_heap_size Nullable(UInt64),
avg_total_js_heap_size Nullable(UInt64),
max_total_js_heap_size Nullable(UInt64),
min_used_js_heap_size Nullable(UInt64),
avg_used_js_heap_size Nullable(UInt64),
max_used_js_heap_size Nullable(UInt64),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id)
TTL datetime + INTERVAL 3 MONTH;
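
The derived timing columns (ttfb, ttlb, response_time, dom_building_time, dom_content_loaded_event_time, load_event_time) are MATERIALIZED behind greaterOrEquals guards, so inconsistent timestamps yield NULL instead of underflowing UInt16, and aggregates simply skip them. A sketch with a placeholder project_id:

SELECT url_host, avg(ttfb) AS avg_ttfb
FROM experimental.events
WHERE project_id = 1 -- placeholder
  AND ttfb IS NOT NULL
GROUP BY url_host
ORDER BY avg_ttfb DESC
LIMIT 10;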

View file

@ -0,0 +1,25 @@
CREATE TABLE IF NOT EXISTS experimental.resources
(
session_id UInt64,
project_id UInt16,
datetime DateTime,
url String,
url_host String MATERIALIZED lower(domain(url)),
url_path String MATERIALIZED lower(path(url)),
url_hostpath String MATERIALIZED concat(url_host, url_path),
type Enum8('other'=-1, 'script'=0, 'stylesheet'=1, 'fetch'=2, 'img'=3, 'media'=4),
name Nullable(String) MATERIALIZED if(type = 'fetch', null,
coalesce(nullIf(splitByChar('/', url_path)[-1], ''),
nullIf(splitByChar('/', url_path)[-2], ''))),
duration Nullable(UInt16),
ttfb Nullable(UInt16),
header_size Nullable(UInt16),
encoded_body_size Nullable(UInt32),
decoded_body_size Nullable(UInt32),
compression_ratio Nullable(Float32) MATERIALIZED divide(decoded_body_size, encoded_body_size),
success Nullable(UInt8) COMMENT 'currently available for type=img only',
_timestamp DateTime DEFAULT now()
) ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, type, session_id)
TTL datetime + INTERVAL 3 MONTH;

View file

@ -0,0 +1,42 @@
CREATE TABLE IF NOT EXISTS experimental.sessions
(
session_id UInt64,
project_id UInt16,
tracker_version LowCardinality(String),
rev_id LowCardinality(Nullable(String)),
user_uuid UUID,
user_os LowCardinality(String),
user_os_version LowCardinality(Nullable(String)),
user_browser LowCardinality(String),
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String),
user_id Nullable(String),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
issue_types Array(LowCardinality(String)),
referrer Nullable(String),
base_referrer Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 3 MONTH
SETTINGS index_granularity = 512;

View file

@ -0,0 +1,11 @@
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now(),
sign Int8
) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;

View file

@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;

View file

@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
project_id UInt16,
user_id UInt32,
error_id String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;

View file

@ -0,0 +1,10 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT *
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -0,0 +1,10 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.resources_l7d_mv
ENGINE = MergeTree
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT *
FROM experimental.resources
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -0,0 +1,13 @@
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.sessions_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 7 DAY
SETTINGS index_granularity = 512
POPULATE
AS
SELECT *
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
AND isNotNull(duration)
AND duration > 0;

View file

@ -1,215 +0,0 @@
CREATE TABLE IF NOT EXISTS negatives_buffer
(
sessionid UInt64,
clickevent_hesitationtime Nullable(UInt64),
clickevent_label Nullable(String),
clickevent_messageid Nullable(UInt64),
clickevent_timestamp Nullable(Datetime),
connectioninformation_downlink Nullable(UInt64),
connectioninformation_type Nullable(String),
consolelog_level Nullable(String),
consolelog_value Nullable(String),
cpuissue_duration Nullable(UInt64),
cpuissue_rate Nullable(UInt64),
cpuissue_timestamp Nullable(Datetime),
createdocument Nullable(UInt8),
createelementnode_id Nullable(UInt64),
createelementnode_parentid Nullable(UInt64),
cssdeleterule_index Nullable(UInt64),
cssdeleterule_stylesheetid Nullable(UInt64),
cssinsertrule_index Nullable(UInt64),
cssinsertrule_rule Nullable(String),
cssinsertrule_stylesheetid Nullable(UInt64),
customevent_messageid Nullable(UInt64),
customevent_name Nullable(String),
customevent_payload Nullable(String),
customevent_timestamp Nullable(Datetime),
domdrop_timestamp Nullable(Datetime),
errorevent_message Nullable(String),
errorevent_messageid Nullable(UInt64),
errorevent_name Nullable(String),
errorevent_payload Nullable(String),
errorevent_source Nullable(String),
errorevent_timestamp Nullable(Datetime),
fetch_duration Nullable(UInt64),
fetch_method Nullable(String),
fetch_request Nullable(String),
fetch_status Nullable(UInt64),
fetch_timestamp Nullable(Datetime),
fetch_url Nullable(String),
graphql_operationkind Nullable(String),
graphql_operationname Nullable(String),
graphql_response Nullable(String),
graphql_variables Nullable(String),
graphqlevent_messageid Nullable(UInt64),
graphqlevent_name Nullable(String),
graphqlevent_timestamp Nullable(Datetime),
inputevent_label Nullable(String),
inputevent_messageid Nullable(UInt64),
inputevent_timestamp Nullable(Datetime),
inputevent_value Nullable(String),
inputevent_valuemasked Nullable(UInt8),
jsexception_message Nullable(String),
jsexception_name Nullable(String),
jsexception_payload Nullable(String),
longtasks_timestamp Nullable(Datetime),
longtasks_duration Nullable(UInt64),
longtasks_containerid Nullable(String),
longtasks_containersrc Nullable(String),
memoryissue_duration Nullable(UInt64),
memoryissue_rate Nullable(UInt64),
memoryissue_timestamp Nullable(Datetime),
metadata_key Nullable(String),
metadata_value Nullable(String),
mobx_payload Nullable(String),
mobx_type Nullable(String),
mouseclick_id Nullable(UInt64),
mouseclick_hesitationtime Nullable(UInt64),
mouseclick_label Nullable(String),
mousemove_x Nullable(UInt64),
mousemove_y Nullable(UInt64),
movenode_id Nullable(UInt64),
movenode_index Nullable(UInt64),
movenode_parentid Nullable(UInt64),
ngrx_action Nullable(String),
ngrx_duration Nullable(UInt64),
ngrx_state Nullable(String),
pageevent_domcontentloadedeventend Nullable(UInt64),
pageevent_domcontentloadedeventstart Nullable(UInt64),
pageevent_firstcontentfulpaint Nullable(UInt64),
pageevent_firstpaint Nullable(UInt64),
pageevent_loaded Nullable(UInt8),
pageevent_loadeventend Nullable(UInt64),
pageevent_loadeventstart Nullable(UInt64),
pageevent_messageid Nullable(UInt64),
pageevent_referrer Nullable(String),
pageevent_requeststart Nullable(UInt64),
pageevent_responseend Nullable(UInt64),
pageevent_responsestart Nullable(UInt64),
pageevent_speedindex Nullable(UInt64),
pageevent_timestamp Nullable(Datetime),
pageevent_url Nullable(String),
pageloadtiming_domcontentloadedeventend Nullable(UInt64),
pageloadtiming_domcontentloadedeventstart Nullable(UInt64),
pageloadtiming_firstcontentfulpaint Nullable(UInt64),
pageloadtiming_firstpaint Nullable(UInt64),
pageloadtiming_loadeventend Nullable(UInt64),
pageloadtiming_loadeventstart Nullable(UInt64),
pageloadtiming_requeststart Nullable(UInt64),
pageloadtiming_responseend Nullable(UInt64),
pageloadtiming_responsestart Nullable(UInt64),
pagerendertiming_speedindex Nullable(UInt64),
pagerendertiming_timetointeractive Nullable(UInt64),
pagerendertiming_visuallycomplete Nullable(UInt64),
performancetrack_frames Nullable(Int64),
performancetrack_ticks Nullable(Int64),
performancetrack_totaljsheapsize Nullable(UInt64),
performancetrack_usedjsheapsize Nullable(UInt64),
performancetrackaggr_avgcpu Nullable(UInt64),
performancetrackaggr_avgfps Nullable(UInt64),
performancetrackaggr_avgtotaljsheapsize Nullable(UInt64),
performancetrackaggr_avgusedjsheapsize Nullable(UInt64),
performancetrackaggr_maxcpu Nullable(UInt64),
performancetrackaggr_maxfps Nullable(UInt64),
performancetrackaggr_maxtotaljsheapsize Nullable(UInt64),
performancetrackaggr_maxusedjsheapsize Nullable(UInt64),
performancetrackaggr_mincpu Nullable(UInt64),
performancetrackaggr_minfps Nullable(UInt64),
performancetrackaggr_mintotaljsheapsize Nullable(UInt64),
performancetrackaggr_minusedjsheapsize Nullable(UInt64),
performancetrackaggr_timestampend Nullable(Datetime),
performancetrackaggr_timestampstart Nullable(Datetime),
profiler_args Nullable(String),
profiler_duration Nullable(UInt64),
profiler_name Nullable(String),
profiler_result Nullable(String),
rawcustomevent_name Nullable(String),
rawcustomevent_payload Nullable(String),
rawerrorevent_message Nullable(String),
rawerrorevent_name Nullable(String),
rawerrorevent_payload Nullable(String),
rawerrorevent_source Nullable(String),
rawerrorevent_timestamp Nullable(Datetime),
redux_action Nullable(String),
redux_duration Nullable(UInt64),
redux_state Nullable(String),
removenode_id Nullable(UInt64),
removenodeattribute_id Nullable(UInt64),
removenodeattribute_name Nullable(String),
resourceevent_decodedbodysize Nullable(UInt64),
resourceevent_duration Nullable(UInt64),
resourceevent_encodedbodysize Nullable(UInt64),
resourceevent_headersize Nullable(UInt64),
resourceevent_messageid Nullable(UInt64),
resourceevent_method Nullable(String),
resourceevent_status Nullable(UInt64),
resourceevent_success Nullable(UInt8),
resourceevent_timestamp Nullable(Datetime),
resourceevent_ttfb Nullable(UInt64),
resourceevent_type Nullable(String),
resourceevent_url Nullable(String),
resourcetiming_decodedbodysize Nullable(UInt64),
resourcetiming_duration Nullable(UInt64),
resourcetiming_encodedbodysize Nullable(UInt64),
resourcetiming_headersize Nullable(UInt64),
resourcetiming_initiator Nullable(String),
resourcetiming_timestamp Nullable(Datetime),
resourcetiming_ttfb Nullable(UInt64),
resourcetiming_url Nullable(String),
sessiondisconnect Nullable(UInt8),
sessiondisconnect_timestamp Nullable(Datetime),
sessionend Nullable(UInt8),
sessionend_timestamp Nullable(Datetime),
sessionstart_projectid Nullable(UInt64),
sessionstart_revid Nullable(String),
sessionstart_timestamp Nullable(Datetime),
sessionstart_trackerversion Nullable(String),
sessionstart_useragent Nullable(String),
sessionstart_userbrowser Nullable(String),
sessionstart_userbrowserversion Nullable(String),
sessionstart_usercountry Nullable(String),
sessionstart_userdevice Nullable(String),
sessionstart_userdeviceheapsize Nullable(UInt64),
sessionstart_userdevicememorysize Nullable(UInt64),
sessionstart_userdevicetype Nullable(String),
sessionstart_useros Nullable(String),
sessionstart_userosversion Nullable(String),
sessionstart_useruuid Nullable(String),
setcssdata_data Nullable(UInt64),
setcssdata_id Nullable(UInt64),
setinputchecked_checked Nullable(UInt64),
setinputchecked_id Nullable(UInt64),
setinputtarget_id Nullable(UInt64),
setinputtarget_label Nullable(UInt64),
setinputvalue_id Nullable(UInt64),
setinputvalue_mask Nullable(UInt64),
setinputvalue_value Nullable(UInt64),
setnodeattribute_id Nullable(UInt64),
setnodeattribute_name Nullable(UInt64),
setnodeattribute_value Nullable(UInt64),
setnodedata_data Nullable(UInt64),
setnodedata_id Nullable(UInt64),
setnodescroll_id Nullable(UInt64),
setnodescroll_x Nullable(Int64),
setnodescroll_y Nullable(Int64),
setpagelocation_navigationstart Nullable(UInt64),
setpagelocation_referrer Nullable(String),
setpagelocation_url Nullable(String),
setpagevisibility_hidden Nullable(UInt8),
setviewportscroll_x Nullable(Int64),
setviewportscroll_y Nullable(Int64),
setviewportsize_height Nullable(UInt64),
setviewportsize_width Nullable(UInt64),
stateaction_type Nullable(String),
stateactionevent_messageid Nullable(UInt64),
stateactionevent_timestamp Nullable(Datetime),
stateactionevent_type Nullable(String),
timestamp_timestamp Nullable(Datetime),
useranonymousid_id Nullable(String),
userid_id Nullable(String),
vuex_mutation Nullable(String),
vuex_state Nullable(String),
received_at Datetime,
batch_order_number Int64
)
ENGINE = Buffer(default, negatives, 16, 10, 120, 10000, 1000000, 10000, 100000000);
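
For reference, the Buffer engine arguments follow the signature below; a layer flushes to the target table once all min thresholds are reached or any max threshold is exceeded:

-- Buffer(database, table, num_layers,
--        min_time, max_time,    -- here 10 s / 120 s
--        min_rows, max_rows,    -- here 10 000 / 1 000 000
--        min_bytes, max_bytes)  -- here ~10 KB / ~100 MB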

View file

@ -1,218 +0,0 @@
CREATE TABLE IF NOT EXISTS negatives
(
sessionid UInt64,
clickevent_hesitationtime Nullable(UInt64),
clickevent_label Nullable(String),
clickevent_messageid Nullable(UInt64),
clickevent_timestamp Nullable(Datetime),
connectioninformation_downlink Nullable(UInt64),
connectioninformation_type Nullable(String),
consolelog_level Nullable(String),
consolelog_value Nullable(String),
cpuissue_duration Nullable(UInt64),
cpuissue_rate Nullable(UInt64),
cpuissue_timestamp Nullable(Datetime),
createdocument Nullable(UInt8),
createelementnode_id Nullable(UInt64),
createelementnode_parentid Nullable(UInt64),
cssdeleterule_index Nullable(UInt64),
cssdeleterule_stylesheetid Nullable(UInt64),
cssinsertrule_index Nullable(UInt64),
cssinsertrule_rule Nullable(String),
cssinsertrule_stylesheetid Nullable(UInt64),
customevent_messageid Nullable(UInt64),
customevent_name Nullable(String),
customevent_payload Nullable(String),
customevent_timestamp Nullable(Datetime),
domdrop_timestamp Nullable(Datetime),
errorevent_message Nullable(String),
errorevent_messageid Nullable(UInt64),
errorevent_name Nullable(String),
errorevent_payload Nullable(String),
errorevent_source Nullable(String),
errorevent_timestamp Nullable(Datetime),
fetch_duration Nullable(UInt64),
fetch_method Nullable(String),
fetch_request Nullable(String),
fetch_status Nullable(UInt64),
fetch_timestamp Nullable(Datetime),
fetch_url Nullable(String),
graphql_operationkind Nullable(String),
graphql_operationname Nullable(String),
graphql_response Nullable(String),
graphql_variables Nullable(String),
graphqlevent_messageid Nullable(UInt64),
graphqlevent_name Nullable(String),
graphqlevent_timestamp Nullable(Datetime),
inputevent_label Nullable(String),
inputevent_messageid Nullable(UInt64),
inputevent_timestamp Nullable(Datetime),
inputevent_value Nullable(String),
inputevent_valuemasked Nullable(UInt8),
jsexception_message Nullable(String),
jsexception_name Nullable(String),
jsexception_payload Nullable(String),
longtasks_timestamp Nullable(Datetime),
longtasks_duration Nullable(UInt64),
longtasks_containerid Nullable(String),
longtasks_containersrc Nullable(String),
memoryissue_duration Nullable(UInt64),
memoryissue_rate Nullable(UInt64),
memoryissue_timestamp Nullable(Datetime),
metadata_key Nullable(String),
metadata_value Nullable(String),
mobx_payload Nullable(String),
mobx_type Nullable(String),
mouseclick_id Nullable(UInt64),
mouseclick_hesitationtime Nullable(UInt64),
mouseclick_label Nullable(String),
mousemove_x Nullable(UInt64),
mousemove_y Nullable(UInt64),
movenode_id Nullable(UInt64),
movenode_index Nullable(UInt64),
movenode_parentid Nullable(UInt64),
ngrx_action Nullable(String),
ngrx_duration Nullable(UInt64),
ngrx_state Nullable(String),
pageevent_domcontentloadedeventend Nullable(UInt64),
pageevent_domcontentloadedeventstart Nullable(UInt64),
pageevent_firstcontentfulpaint Nullable(UInt64),
pageevent_firstpaint Nullable(UInt64),
pageevent_loaded Nullable(UInt8),
pageevent_loadeventend Nullable(UInt64),
pageevent_loadeventstart Nullable(UInt64),
pageevent_messageid Nullable(UInt64),
pageevent_referrer Nullable(String),
pageevent_requeststart Nullable(UInt64),
pageevent_responseend Nullable(UInt64),
pageevent_responsestart Nullable(UInt64),
pageevent_speedindex Nullable(UInt64),
pageevent_timestamp Nullable(Datetime),
pageevent_url Nullable(String),
pageloadtiming_domcontentloadedeventend Nullable(UInt64),
pageloadtiming_domcontentloadedeventstart Nullable(UInt64),
pageloadtiming_firstcontentfulpaint Nullable(UInt64),
pageloadtiming_firstpaint Nullable(UInt64),
pageloadtiming_loadeventend Nullable(UInt64),
pageloadtiming_loadeventstart Nullable(UInt64),
pageloadtiming_requeststart Nullable(UInt64),
pageloadtiming_responseend Nullable(UInt64),
pageloadtiming_responsestart Nullable(UInt64),
pagerendertiming_speedindex Nullable(UInt64),
pagerendertiming_timetointeractive Nullable(UInt64),
pagerendertiming_visuallycomplete Nullable(UInt64),
performancetrack_frames Nullable(Int64),
performancetrack_ticks Nullable(Int64),
performancetrack_totaljsheapsize Nullable(UInt64),
performancetrack_usedjsheapsize Nullable(UInt64),
performancetrackaggr_avgcpu Nullable(UInt64),
performancetrackaggr_avgfps Nullable(UInt64),
performancetrackaggr_avgtotaljsheapsize Nullable(UInt64),
performancetrackaggr_avgusedjsheapsize Nullable(UInt64),
performancetrackaggr_maxcpu Nullable(UInt64),
performancetrackaggr_maxfps Nullable(UInt64),
performancetrackaggr_maxtotaljsheapsize Nullable(UInt64),
performancetrackaggr_maxusedjsheapsize Nullable(UInt64),
performancetrackaggr_mincpu Nullable(UInt64),
performancetrackaggr_minfps Nullable(UInt64),
performancetrackaggr_mintotaljsheapsize Nullable(UInt64),
performancetrackaggr_minusedjsheapsize Nullable(UInt64),
performancetrackaggr_timestampend Nullable(Datetime),
performancetrackaggr_timestampstart Nullable(Datetime),
profiler_args Nullable(String),
profiler_duration Nullable(UInt64),
profiler_name Nullable(String),
profiler_result Nullable(String),
rawcustomevent_name Nullable(String),
rawcustomevent_payload Nullable(String),
rawerrorevent_message Nullable(String),
rawerrorevent_name Nullable(String),
rawerrorevent_payload Nullable(String),
rawerrorevent_source Nullable(String),
rawerrorevent_timestamp Nullable(Datetime),
redux_action Nullable(String),
redux_duration Nullable(UInt64),
redux_state Nullable(String),
removenode_id Nullable(UInt64),
removenodeattribute_id Nullable(UInt64),
removenodeattribute_name Nullable(String),
resourceevent_decodedbodysize Nullable(UInt64),
resourceevent_duration Nullable(UInt64),
resourceevent_encodedbodysize Nullable(UInt64),
resourceevent_headersize Nullable(UInt64),
resourceevent_messageid Nullable(UInt64),
resourceevent_method Nullable(String),
resourceevent_status Nullable(UInt64),
resourceevent_success Nullable(UInt8),
resourceevent_timestamp Nullable(Datetime),
resourceevent_ttfb Nullable(UInt64),
resourceevent_type Nullable(String),
resourceevent_url Nullable(String),
resourcetiming_decodedbodysize Nullable(UInt64),
resourcetiming_duration Nullable(UInt64),
resourcetiming_encodedbodysize Nullable(UInt64),
resourcetiming_headersize Nullable(UInt64),
resourcetiming_initiator Nullable(String),
resourcetiming_timestamp Nullable(Datetime),
resourcetiming_ttfb Nullable(UInt64),
resourcetiming_url Nullable(String),
sessiondisconnect Nullable(UInt8),
sessiondisconnect_timestamp Nullable(Datetime),
sessionend Nullable(UInt8),
sessionend_timestamp Nullable(Datetime),
sessionstart_projectid Nullable(UInt64),
sessionstart_revid Nullable(String),
sessionstart_timestamp Nullable(Datetime),
sessionstart_trackerversion Nullable(String),
sessionstart_useragent Nullable(String),
sessionstart_userbrowser Nullable(String),
sessionstart_userbrowserversion Nullable(String),
sessionstart_usercountry Nullable(String),
sessionstart_userdevice Nullable(String),
sessionstart_userdeviceheapsize Nullable(UInt64),
sessionstart_userdevicememorysize Nullable(UInt64),
sessionstart_userdevicetype Nullable(String),
sessionstart_useros Nullable(String),
sessionstart_userosversion Nullable(String),
sessionstart_useruuid Nullable(String),
setcssdata_data Nullable(UInt64),
setcssdata_id Nullable(UInt64),
setinputchecked_checked Nullable(UInt64),
setinputchecked_id Nullable(UInt64),
setinputtarget_id Nullable(UInt64),
setinputtarget_label Nullable(UInt64),
setinputvalue_id Nullable(UInt64),
setinputvalue_mask Nullable(UInt64),
setinputvalue_value Nullable(UInt64),
setnodeattribute_id Nullable(UInt64),
setnodeattribute_name Nullable(UInt64),
setnodeattribute_value Nullable(UInt64),
setnodedata_data Nullable(UInt64),
setnodedata_id Nullable(UInt64),
setnodescroll_id Nullable(UInt64),
setnodescroll_x Nullable(Int64),
setnodescroll_y Nullable(Int64),
setpagelocation_navigationstart Nullable(UInt64),
setpagelocation_referrer Nullable(String),
setpagelocation_url Nullable(String),
setpagevisibility_hidden Nullable(UInt8),
setviewportscroll_x Nullable(Int64),
setviewportscroll_y Nullable(Int64),
setviewportsize_height Nullable(UInt64),
setviewportsize_width Nullable(UInt64),
stateaction_type Nullable(String),
stateactionevent_messageid Nullable(UInt64),
stateactionevent_timestamp Nullable(Datetime),
stateactionevent_type Nullable(String),
timestamp_timestamp Nullable(Datetime),
useranonymousid_id Nullable(String),
userid_id Nullable(String),
vuex_mutation Nullable(String),
vuex_state Nullable(String),
received_at Datetime,
batch_order_number Int64
)
ENGINE = MergeTree()
PARTITION BY toYYYYMM(received_at)
ORDER BY (received_at, batch_order_number)
SETTINGS min_bytes_for_wide_part = 1, use_minimalistic_part_header_in_zookeeper = 1;

View file

@ -0,0 +1,20 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.8.0-ee'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS sessions_last_check_at timestamp without time zone NULL DEFAULT NULL;
COMMIT;
CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_unique_project_id_md5value_type_idx ON autocomplete (project_id, md5(value), type);
BEGIN;
DROP INDEX IF EXISTS autocomplete_unique;
COMMIT;
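
CREATE INDEX CONCURRENTLY cannot run inside a transaction block, which is why the migration commits first, builds the new md5-based unique index outside any transaction, and only then drops the old autocomplete_unique index. A post-migration check sketch:

SELECT indexname
FROM pg_indexes
WHERE tablename = 'autocomplete'
  AND indexname = 'autocomplete_unique_project_id_md5value_type_idx';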

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.7.0-ee'
SELECT 'v1.8.0-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -228,32 +228,34 @@ $$
CREATE TABLE IF NOT EXISTS projects
(
project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
name text NOT NULL,
active boolean NOT NULL,
sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
max_session_duration integer NOT NULL DEFAULT 7200000,
metadata_1 text DEFAULT NULL,
metadata_2 text DEFAULT NULL,
metadata_3 text DEFAULT NULL,
metadata_4 text DEFAULT NULL,
metadata_5 text DEFAULT NULL,
metadata_6 text DEFAULT NULL,
metadata_7 text DEFAULT NULL,
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL,
save_request_payloads boolean NOT NULL DEFAULT FALSE,
gdpr jsonb NOT NULL DEFAULT'{
project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
name text NOT NULL,
active boolean NOT NULL,
sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
max_session_duration integer NOT NULL DEFAULT 7200000,
metadata_1 text DEFAULT NULL,
metadata_2 text DEFAULT NULL,
metadata_3 text DEFAULT NULL,
metadata_4 text DEFAULT NULL,
metadata_5 text DEFAULT NULL,
metadata_6 text DEFAULT NULL,
metadata_7 text DEFAULT NULL,
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL,
save_request_payloads boolean NOT NULL DEFAULT FALSE,
gdpr jsonb NOT NULL DEFAULT '{
"maskEmails": true,
"sampleRate": 33,
"maskNumbers": false,
"defaultInputMode": "plain"
}'::jsonb
}'::jsonb,
first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL
);

View file

@ -14,3 +14,4 @@ servers/sourcemaps-server.js
/utils/HeapSnapshot.js
/utils/helper.js
/utils/assistHelper.js
.local

View file

@ -0,0 +1,20 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.8.0'
$$ LANGUAGE sql IMMUTABLE;
ALTER TABLE IF EXISTS projects
ADD COLUMN IF NOT EXISTS first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS sessions_last_check_at timestamp without time zone NULL DEFAULT NULL;
COMMIT;
CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_unique_project_id_md5value_type_idx ON autocomplete (project_id, md5(value), type);
BEGIN;
DROP INDEX IF EXISTS autocomplete_unique;
COMMIT;

View file

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.7.0'
SELECT 'v1.8.0'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@ -173,31 +173,33 @@ $$
CREATE TABLE projects
(
project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
name text NOT NULL,
active boolean NOT NULL,
sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
max_session_duration integer NOT NULL DEFAULT 7200000,
metadata_1 text DEFAULT NULL,
metadata_2 text DEFAULT NULL,
metadata_3 text DEFAULT NULL,
metadata_4 text DEFAULT NULL,
metadata_5 text DEFAULT NULL,
metadata_6 text DEFAULT NULL,
metadata_7 text DEFAULT NULL,
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL,
save_request_payloads boolean NOT NULL DEFAULT FALSE,
gdpr jsonb NOT NULL DEFAULT '{
project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
name text NOT NULL,
active boolean NOT NULL,
sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
deleted_at timestamp without time zone NULL DEFAULT NULL,
max_session_duration integer NOT NULL DEFAULT 7200000,
metadata_1 text DEFAULT NULL,
metadata_2 text DEFAULT NULL,
metadata_3 text DEFAULT NULL,
metadata_4 text DEFAULT NULL,
metadata_5 text DEFAULT NULL,
metadata_6 text DEFAULT NULL,
metadata_7 text DEFAULT NULL,
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL,
save_request_payloads boolean NOT NULL DEFAULT FALSE,
gdpr jsonb NOT NULL DEFAULT '{
"maskEmails": true,
"sampleRate": 33,
"maskNumbers": false,
"defaultInputMode": "plain"
}'::jsonb -- ??????
}'::jsonb,
first_recorded_session_at timestamp without time zone NULL DEFAULT NULL,
sessions_last_check_at timestamp without time zone NULL DEFAULT NULL
);
CREATE INDEX projects_project_key_idx ON public.projects (project_key);
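
A lookup sketch served by this index, resolving a public project key to its internal id (placeholder key):

SELECT project_id
FROM public.projects
WHERE project_key = 'placeholder_key'
  AND deleted_at IS NULL;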

View file

@ -4,3 +4,4 @@
**/build.sh
**/build_*.sh
**/*deploy.sh
.local

View file

@ -2,4 +2,5 @@
node_modules
npm-debug.log
.cache
test.html
test.html
.local