diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py
index 21249773c..56fde11da 100644
--- a/api/chalicelib/core/alerts_processor.py
+++ b/api/chalicelib/core/alerts_processor.py
@@ -102,11 +102,9 @@ def Build(a):
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
- full_args, query_part, sort = sessions.search_query_parts(
- data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
- error_status=None, errors_only=False,
- favorite_only=False, issue=None, project_id=a["projectId"],
- user_id=None)
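+        # search_query_parts now returns only (full_args, query_part); sorting is handled by the callers that need it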
+        full_args, query_part = sessions.search_query_parts(
+ data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
+ issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py
index 70f563ec8..c6fb35713 100644
--- a/api/chalicelib/core/assist.py
+++ b/api/chalicelib/core/assist.py
@@ -64,14 +64,17 @@ def get_live_sessions(project_id, filters=None):
return helper.list_to_camel_case(results)
-def get_live_sessions_ws(project_id):
+def get_live_sessions_ws(project_id, user_id=None):
project_key = projects.get_project_key(project_id)
- connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
- if connected_peers.status_code != 200:
- print("!! issue with the peer-server")
- print(connected_peers.text)
- return []
+ params = {}
+ if user_id and len(user_id) > 0:
+ params["userId"] = user_id
try:
+ connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
+ if connected_peers.status_code != 200:
+ print("!! issue with the peer-server")
+ print(connected_peers.text)
+ return []
live_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Live-Assist response")
@@ -101,12 +104,12 @@ def get_live_session_by_id(project_id, session_id):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
- connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
- if connected_peers.status_code != 200:
- print("!! issue with the peer-server")
- print(connected_peers.text)
- return False
try:
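+        # the peer-server request now runs inside the try block so connection errors are handled by the except below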
+ connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
+ if connected_peers.status_code != 200:
+ print("!! issue with the peer-server")
+ print(connected_peers.text)
+ return False
connected_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Assist response")
diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py
index 48e6b7bd9..a7f863e79 100644
--- a/api/chalicelib/core/errors.py
+++ b/api/chalicelib/core/errors.py
@@ -1,7 +1,8 @@
import json
+import schemas
from chalicelib.core import sourcemaps, sessions
-from chalicelib.utils import pg_client, helper, dev
+from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@@ -398,79 +399,104 @@ def get_details_chart(project_id, error_id, user_id, **data):
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
- ch_sub_query = [f"{project_key} =%(project_id)s"]
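+    # when project_key is None, the project_id constraint is skipped (used for the chart subquery, which is scoped through the error_id join instead)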
+ if project_key is None:
+ ch_sub_query = []
+ else:
+ ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
- if platform == 'mobile':
+ if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
- elif platform == 'desktop':
+ elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
- "datetime": "max_datetime",
- "lastOccurrence": "max_datetime",
- "firstOccurrence": "min_datetime"
+ schemas.ErrorSort.occurrence: "max_datetime",
+ schemas.ErrorSort.users_count: "users",
+ schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
-@dev.timed
-def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
- status = status.upper()
- if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
- return {"errors": ["invalid error status"]}
- pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id")
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
+ empty_response = {"data": {
+ 'total': 0,
+ 'errors': []
+ }}
+
+ platform = None
+ for f in data.filters:
+ if f.type == schemas.FilterType.platform and len(f.value) > 0:
+ platform = f.value[0]
+ pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
- pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True)
- pg_sub_query_chart.append("source ='js_exception'")
+ pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
+ # pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
- if data.get("startDate") is None:
- data["startDate"] = TimeUTC.now(-30)
- if data.get("endDate") is None:
- data["endDate"] = TimeUTC.now(1)
- if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
+ if data.startDate is None:
+ data.startDate = TimeUTC.now(-30)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now(1)
+ if len(data.events) > 0 or len(data.filters) > 0:
+ print("-- searching for sessions before errors")
+        # when data.bookmarked is set, search2_pg restricts the search to sessions tied to favorite errors
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
- error_status=status, favorite_only=favorite_only)
+ error_status=data.status)
if len(statuses) == 0:
- return {"data": {
- 'total': 0,
- 'errors': []
- }}
- error_ids = [e["error_id"] for e in statuses]
+ return empty_response
+ error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
- if data.get("startDate") is None:
- data["startDate"] = TimeUTC.now(-7)
- if data.get("endDate") is None:
- data["endDate"] = TimeUTC.now()
- density = data.get("density", 7)
- step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
+ if data.startDate is None:
+ data.startDate = TimeUTC.now(-7)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now()
+ step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
- if data.get("sort") is not None:
- sort = __get_sort_key(data["sort"])
+ if data.sort is not None:
+ sort = __get_sort_key(data.sort)
order = "DESC"
- if data.get("order") is not None:
- order = data["order"]
+ if data.order is not None:
+ order = data.order
+ extra_join = ""
params = {
- "startDate": data['startDate'],
- "endDate": data['endDate'],
+ "startDate": data.startDate,
+ "endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
+ if data.status != schemas.ErrorStatus.all:
+ pg_sub_query.append("status = %(error_status)s")
+ params["error_status"] = data.status
+ if data.limit is not None and data.page is not None:
+ params["errors_offset"] = (data.page - 1) * data.limit
+ params["errors_limit"] = data.limit
+ else:
+ params["errors_offset"] = 0
+ params["errors_limit"] = 200
+
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
- main_pg_query = f"""\
- SELECT error_id,
+ if data.bookmarked:
+ pg_sub_query.append("ufe.user_id = %(userId)s")
+ extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
+ if data.query is not None and len(data.query) > 0:
+ pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
+ params["error_query"] = helper.values_for_operator(value=data.query,
+ op=schemas.SearchEventOperator._contains)
+
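+        # COUNT(details) OVER () exposes the total number of matching errors (full_count) before LIMIT/OFFSET is applied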
+ main_pg_query = f"""SELECT full_count,
+ error_id,
name,
message,
users,
@@ -478,19 +504,23 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
last_occurrence,
first_occurrence,
chart
- FROM (SELECT error_id,
- name,
- message,
- COUNT(DISTINCT user_uuid) AS users,
- COUNT(DISTINCT session_id) AS sessions,
- MAX(timestamp) AS max_datetime,
- MIN(timestamp) AS min_datetime
- FROM events.errors
- INNER JOIN public.errors AS pe USING (error_id)
- INNER JOIN public.sessions USING (session_id)
- WHERE {" AND ".join(pg_sub_query)}
- GROUP BY error_id, name, message
- ORDER BY {sort} {order}) AS details
+ FROM (SELECT COUNT(details) OVER () AS full_count, details.*
+ FROM (SELECT error_id,
+ name,
+ message,
+ COUNT(DISTINCT user_uuid) AS users,
+ COUNT(DISTINCT session_id) AS sessions,
+ MAX(timestamp) AS max_datetime,
+ MIN(timestamp) AS min_datetime
+ FROM events.errors
+ INNER JOIN public.errors AS pe USING (error_id)
+ INNER JOIN public.sessions USING (session_id)
+ {extra_join}
+ WHERE {" AND ".join(pg_sub_query)}
+ GROUP BY error_id, name, message
+ ORDER BY {sort} {order}) AS details
+ LIMIT %(errors_limit)s OFFSET %(errors_offset)s
+ ) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
@@ -500,7 +530,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
- FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
+ FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
@@ -508,16 +538,14 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
+ # print("--------------------")
+
cur.execute(cur.mogrify(main_pg_query, params))
- total = cur.rowcount
+ rows = cur.fetchall()
+ total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
- row = cur.fetchone()
- rows = []
- limit = 200
- while row is not None and len(rows) < limit:
- rows.append(row)
- row = cur.fetchone()
+
if total == 0:
rows = []
else:
@@ -537,15 +565,16 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
- statuses = cur.fetchall()
+ statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
- s["error_id"]: s for s in statuses
+ s["errorId"]: s for s in statuses
}
for r in rows:
+ r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
- r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
+ r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@@ -581,7 +610,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
- error = get(error_id=error_id)
+ error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":
diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py
index 7abaa4fe9..933e3f800 100644
--- a/api/chalicelib/core/events.py
+++ b/api/chalicelib/core/events.py
@@ -97,7 +97,55 @@ def __get_data_for_extend(data):
return data["data"]
-def __pg_errors_query(source=None):
+def __pg_errors_query(source=None, value_length=None):
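+    # for terms longer than 2 characters (or when the length is not provided), prefix matches on svalue are combined with substring matches on value; the fallback below only uses the prefix match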
+ if value_length is None or value_length > 2:
+ return f"""((SELECT DISTINCT ON(lg.message)
+ lg.message AS value,
+ source,
+ '{event_type.ERROR.ui_type}' AS type
+ FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.message ILIKE %(svalue)s
+ AND lg.project_id = %(project_id)s
+ {"AND source = %(source)s" if source is not None else ""}
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.name)
+ lg.name AS value,
+ source,
+ '{event_type.ERROR.ui_type}' AS type
+ FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.name ILIKE %(svalue)s
+ AND lg.project_id = %(project_id)s
+ {"AND source = %(source)s" if source is not None else ""}
+ LIMIT 5)
+ UNION
+ (SELECT DISTINCT ON(lg.message)
+ lg.message AS value,
+ source,
+ '{event_type.ERROR.ui_type}' AS type
+ FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.message ILIKE %(value)s
+ AND lg.project_id = %(project_id)s
+ {"AND source = %(source)s" if source is not None else ""}
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.name)
+ lg.name AS value,
+ source,
+ '{event_type.ERROR.ui_type}' AS type
+ FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.name ILIKE %(value)s
+ AND lg.project_id = %(project_id)s
+ {"AND source = %(source)s" if source is not None else ""}
+ LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
@@ -120,30 +168,6 @@ def __pg_errors_query(source=None):
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
- LIMIT 5)
- UNION
- (SELECT DISTINCT ON(lg.message)
- lg.message AS value,
- source,
- '{event_type.ERROR.ui_type}' AS type
- FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
- WHERE
- s.project_id = %(project_id)s
- AND lg.message ILIKE %(value)s
- AND lg.project_id = %(project_id)s
- {"AND source = %(source)s" if source is not None else ""}
- LIMIT 5)
- UNION ALL
- (SELECT DISTINCT ON(lg.name)
- lg.name AS value,
- source,
- '{event_type.ERROR.ui_type}' AS type
- FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
- WHERE
- s.project_id = %(project_id)s
- AND lg.name ILIKE %(value)s
- AND lg.project_id = %(project_id)s
- {"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
@@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
- cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value),
- "svalue": helper.string_to_sql_like("^" + value),
- "source": source}))
+ cur.mogrify(__pg_errors_query(source,
+ value_length=len(value) \
+ if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None),
+ {"project_id": project_id, "value": helper.string_to_sql_like(value),
+ "svalue": helper.string_to_sql_like("^" + value),
+ "source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None):
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
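+    # same length-based strategy as __pg_errors_query: terms of 2 characters or fewer only run the prefix (svalue) queries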
+ if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2:
+ query = f"""(SELECT DISTINCT ON(lg.reason)
+ lg.reason AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.reason ILIKE %(svalue)s
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.name)
+ lg.name AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.name ILIKE %(svalue)s
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.reason)
+ lg.reason AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.reason ILIKE %(value)s
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.name)
+ lg.name AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.name ILIKE %(value)s
+ LIMIT 5);"""
+ else:
+ query = f"""(SELECT DISTINCT ON(lg.reason)
+ lg.reason AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.reason ILIKE %(svalue)s
+ LIMIT 5)
+ UNION ALL
+ (SELECT DISTINCT ON(lg.name)
+ lg.name AS value,
+ '{event_type.ERROR_IOS.ui_type}' AS type
+ FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+ WHERE
+ s.project_id = %(project_id)s
+ AND lg.project_id = %(project_id)s
+ AND lg.name ILIKE %(svalue)s
+ LIMIT 5);"""
with pg_client.PostgresClient() as cur:
- cur.execute(
- cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason)
- lg.reason AS value,
- '{event_type.ERROR_IOS.ui_type}' AS type
- FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
- WHERE
- s.project_id = %(project_id)s
- AND lg.reason ILIKE %(value)s
- LIMIT 5)
- UNION ALL
- (SELECT DISTINCT ON(lg.name)
- lg.name AS value,
- '{event_type.ERROR_IOS.ui_type}' AS type
- FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
- WHERE
- s.project_id = %(project_id)s
- AND lg.name ILIKE %(value)s
- LIMIT 5);""",
- {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
+ cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
+ "svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
- sub_from.append(
- f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)")
+ if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2:
+ sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
+ FROM public.sessions
+ WHERE project_id = %(project_id)s
+ AND {colname} ILIKE %(svalue)s LIMIT 5)
+ UNION
+ (SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
+ FROM public.sessions
+ WHERE project_id = %(project_id)s
+ AND {colname} ILIKE %(value)s LIMIT 5))
+ """)
+ else:
+ sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
+ FROM public.sessions
+ WHERE project_id = %(project_id)s
+ AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
- LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
+ LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
+ "svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
-def __generic_query(typename):
- return f"""\
- (SELECT value, type
- FROM public.autocomplete
- WHERE
- project_id = %(project_id)s
- AND type='{typename}'
- AND value ILIKE %(svalue)s
- LIMIT 5)
- UNION
- (SELECT value, type
- FROM public.autocomplete
- WHERE
- project_id = %(project_id)s
- AND type='{typename}'
- AND value ILIKE %(value)s
- LIMIT 5)"""
+def __generic_query(typename, value_length=None):
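+    # terms longer than 2 characters union a prefix match (svalue) with a substring match (value); shorter terms only run the cheaper prefix query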
+ if value_length is None or value_length > 2:
+ return f"""(SELECT DISTINCT value, type
+ FROM public.autocomplete
+ WHERE
+ project_id = %(project_id)s
+ AND type='{typename}'
+ AND value ILIKE %(svalue)s
+ LIMIT 5)
+ UNION
+ (SELECT DISTINCT value, type
+ FROM public.autocomplete
+ WHERE
+ project_id = %(project_id)s
+ AND type='{typename}'
+ AND value ILIKE %(value)s
+ LIMIT 5);"""
+ return f"""SELECT DISTINCT value, type
+ FROM public.autocomplete
+ WHERE
+ project_id = %(project_id)s
+ AND type='{typename}'
+ AND value ILIKE %(svalue)s
+ LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
- cur.execute(cur.mogrify(__generic_query(event.ui_type),
- {"project_id": project_id, "value": helper.string_to_sql_like(value),
- "svalue": helper.string_to_sql_like("^" + value)}))
+ cur.execute(
+ cur.mogrify(
+ __generic_query(event.ui_type,
+ value_length=len(value) \
+ if SUPPORTED_TYPES[event.ui_type].change_by_length \
+ else None),
+ {"project_id": project_id, "value": helper.string_to_sql_like(value),
+ "svalue": helper.string_to_sql_like("^" + value)}))
return helper.list_to_camel_case(cur.fetchall())
return f
@@ -263,142 +360,96 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
query=__generic_query(typename=event_type.CLICK.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
query=__generic_query(typename=event_type.INPUT.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
query=__generic_query(typename=event_type.LOCATION.ui_type),
- value_limit=3,
- starts_with="/",
- starts_limit=3,
- ignore_if_starts_with=[]),
+ change_by_length=True),
event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
query=__generic_query(typename=event_type.CUSTOM.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=[""]),
+ change_by_length=True),
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
query=__generic_query(typename=event_type.REQUEST.ui_type),
- value_limit=3,
- starts_with="/",
- starts_limit=3,
- ignore_if_starts_with=[""]),
+ change_by_length=True),
event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
query=__generic_query(typename=event_type.GRAPHQL.ui_type),
- value_limit=3,
- starts_with="/",
- starts_limit=4,
- ignore_if_starts_with=[]),
+ change_by_length=True),
event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
query=__generic_query(typename=event_type.STATEACTION.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=[]),
+ change_by_length=True),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
- query=None,
- value_limit=4,
- starts_with="",
- starts_limit=4,
- ignore_if_starts_with=["/"]),
+ query=None, change_by_length=True),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
- query=None,
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ query=None, change_by_length=True),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
- value_limit=3,
- starts_with="/",
- starts_limit=3,
- ignore_if_starts_with=[]),
+ change_by_length=True),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=[""]),
+ change_by_length=True),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
- value_limit=3,
- starts_with="/",
- starts_limit=3,
- ignore_if_starts_with=[""]),
- event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors,
- query=None,
- value_limit=4,
- starts_with="",
- starts_limit=4,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
+ event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
+ query=None, change_by_length=True),
}
-def __get_merged_queries(queries, value, project_id):
- if len(queries) == 0:
- return []
- now = TimeUTC.now()
- with pg_client.PostgresClient() as cur:
- cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")",
- {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
- results = helper.list_to_camel_case(cur.fetchall())
- print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}")
- return results
-
-
def __get_autocomplete_table(value, project_id):
+ autocomplete_events = [schemas.FilterType.rev_id,
+ schemas.EventType.click,
+ schemas.FilterType.user_device,
+ schemas.FilterType.user_id,
+ schemas.FilterType.user_browser,
+ schemas.FilterType.user_os,
+ schemas.EventType.custom,
+ schemas.FilterType.user_country,
+ schemas.EventType.location,
+ schemas.EventType.input]
+ autocomplete_events.sort()
+ sub_queries = []
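+    # build one small per-type autocomplete query (prefix match, plus a substring match when the term is longer than 2 characters) instead of scanning every autocomplete row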
+ for e in autocomplete_events:
+ sub_queries.append(f"""(SELECT type, value
+ FROM public.autocomplete
+ WHERE project_id = %(project_id)s
+ AND type= '{e}'
+ AND value ILIKE %(svalue)s
+ LIMIT 5)""")
+ if len(value) > 2:
+ sub_queries.append(f"""(SELECT type, value
+ FROM public.autocomplete
+ WHERE project_id = %(project_id)s
+ AND type= '{e}'
+ AND value ILIKE %(value)s
+ LIMIT 5)""")
with pg_client.PostgresClient() as cur:
- cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
- FROM (SELECT project_id, type, value
- FROM (SELECT *,
- ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
- FROM public.autocomplete
- WHERE project_id = %(project_id)s
- AND value ILIKE %(svalue)s
- UNION
- SELECT *,
- ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
- FROM public.autocomplete
- WHERE project_id = %(project_id)s
- AND value ILIKE %(value)s) AS u
- WHERE Row_ID <= 5) AS sfa
- ORDER BY sfa.type;""",
- {"project_id": project_id, "value": helper.string_to_sql_like(value),
- "svalue": helper.string_to_sql_like("^" + value)}))
+ query = cur.mogrify(" UNION ".join(sub_queries) + ";",
+ {"project_id": project_id, "value": helper.string_to_sql_like(value),
+ "svalue": helper.string_to_sql_like("^" + value)})
+ cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
-def search_pg2(text, event_type, project_id, source, key):
+def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
- if event_type + "_IOS" in SUPPORTED_TYPES.keys():
- rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
- source=source)
+        # iOS autocomplete for the same event type is disabled for now:
+ # if event_type + "_IOS" in SUPPORTED_TYPES.keys():
+ # rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
+ # source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py
index cdd6cec20..1dc9e3347 100644
--- a/api/chalicelib/core/funnels.py
+++ b/api/chalicelib/core/funnels.py
@@ -1,4 +1,5 @@
import json
+from typing import List
import chalicelib.utils.helper
import schemas
@@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
-# def filter_stages(stages):
-# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
-# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
-# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
-# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
-# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
+def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
+ ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
+ schemas.EventType.location, schemas.EventType.custom,
+ schemas.EventType.click_ios, schemas.EventType.input_ios,
+ schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
+ return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
+
+
+def __parse_events(f_events: List[dict]):
+ return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
+
+
+def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
+ return [e.dict() for e in f_events]
+
+
+def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
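+    # normalize stages: default the operator to "is", wrap scalar values in a list, and drop stages whose value is empty unless the operator is an "any" operator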
+ if f_events is None:
+ return
+ events = []
+ for e in f_events:
+ if e.operator is None:
+ e.operator = schemas.SearchEventOperator._is
+
+ if not isinstance(e.value, list):
+ e.value = [e.value]
+ is_any = sessions._isAny_opreator(e.operator)
+ if not is_any and isinstance(e.value, list) and len(e.value) == 0:
+ continue
+ events.append(e)
+ return events
+
def __transform_old_funnels(events):
for e in events:
@@ -28,7 +55,7 @@ def __transform_old_funnels(events):
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
- # filter.events = filter_stages(stages=filter.events)
+ filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
@@ -76,9 +103,12 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non
query
)
r = cur.fetchone()
+ if r is None:
+ return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
+ r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
@@ -102,9 +132,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
- # row["filter"]["events"] = filter_stages(row["filter"]["events"])
+ row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
- row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
+ row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
@@ -168,9 +198,10 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
- # data.events = filter_stages(data.events)
+ data.events = filter_stages(data.events)
+ data.events = __fix_stages(data.events)
if len(data.events) == 0:
- f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
@@ -186,26 +217,37 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
+ insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
+        # fix: cap the drop count so it never exceeds the first stage's sessions count
+ if total_drop_due_to_issues > insights[0]["sessionsCount"]:
+ total_drop_due_to_issues = insights[0]["sessionsCount"]
+ # end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
- return {"data": {"stages": helper.list_to_camel_case(insights),
+ return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
-def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data):
- # data["events"] = filter_stages(data.get("events", []))
- if len(data["events"]) == 0:
- f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
+def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
+ data.events = filter_stages(__parse_events(data.events))
+ if len(data.events) == 0:
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
- get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
- start_date=data.get('startDate', None),
- end_date=data.get('endDate', None))
- data = f["filter"]
- insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
+ get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+ start_date=data.startDate,
+ end_date=data.endDate)
+ data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
+ data.events = __fix_stages(data.events)
+ insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
+ insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
+        # fix: cap the drop count so it never exceeds the first stage's sessions count
+ if total_drop_due_to_issues > insights[0]["sessionsCount"]:
+ total_drop_due_to_issues = insights[0]["sessionsCount"]
+ # end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
- return {"data": {"stages": helper.list_to_camel_case(insights),
+ return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
@@ -220,25 +262,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
@dev.timed
-def get_issues_on_the_fly(funnel_id, user_id, project_id, data):
- first_stage = data.get("firstStage")
- last_stage = data.get("lastStage")
- # data["events"] = filter_stages(data.get("events", []))
- if len(data["events"]) == 0:
- f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
+def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
+ data.events = filter_stages(data.events)
+ data.events = __fix_stages(data.events)
+ if len(data.events) == 0:
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
- get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
- start_date=data.get('startDate', None),
- end_date=data.get('endDate', None))
- data = f["filter"]
+ get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
+ start_date=data.startDate,
+ end_date=data.endDate)
+ data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
+ if len(data.events) < 2:
+ return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
- significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
- last_stage=last_stage))}
+ significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
+ last_stage=len(data.events)))}
-def get(funnel_id, project_id, user_id, flatten=True):
+def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@@ -260,7 +303,11 @@ def get(funnel_id, project_id, user_id, flatten=True):
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
- # f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
+ f["filter"]["events"] = __parse_events(f["filter"]["events"])
+ f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
+ if fix_stages:
+ f["filter"]["events"] = __fix_stages(f["filter"]["events"])
+ f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
@@ -270,7 +317,7 @@ def get(funnel_id, project_id, user_id, flatten=True):
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
- f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
+ f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date
@@ -279,7 +326,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
- issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \
+ issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None
diff --git a/api/chalicelib/core/resources.py b/api/chalicelib/core/resources.py
index 6b9ba9170..6a7e395f8 100644
--- a/api/chalicelib/core/resources.py
+++ b/api/chalicelib/core/resources.py
@@ -13,7 +13,8 @@ def get_by_session_id(session_id):
header_size,
encoded_body_size,
decoded_body_size,
- success
+ success,
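+                    -- assume status 200 when the resource succeeded but no status code was recorded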
+ COALESCE(status, CASE WHEN success THEN 200 END) AS status
FROM events.resources
WHERE session_id = %(session_id)s;"""
params = {"session_id": session_id}
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index b67df2a1e..1903cc08b 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -168,10 +168,11 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
@dev.timed
-def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False,
- error_status="ALL", count_only=False, issue=None):
- full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id,
- user_id)
+def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
+ error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
+ full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
+ favorite_only=data.bookmarked, issue=issue, project_id=project_id,
+ user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
@@ -198,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
elif data.group_by_user:
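+            # group sessions by user_id; groups are ordered by session count by default, otherwise by MIN/MAX of the requested sort column depending on the order direction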
+ g_sort = "count(full_sessions)"
+ if data.order is None:
+ data.order = "DESC"
+ else:
+ data.order = data.order.upper()
+ if data.sort is not None and data.sort != 'sessionsCount':
+ sort = helper.key_to_snake_case(data.sort)
+ g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
+ else:
+ sort = 'start_ts'
+
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(users_sessions)
@@ -206,51 +218,58 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
count(full_sessions) AS user_sessions_count,
jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
MIN(full_sessions.start_ts) AS first_session_ts,
- ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
- FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
- FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
- {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
- {query_part}
- ORDER BY s.session_id desc) AS filtred_sessions
- ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
- GROUP BY user_id
- ORDER BY user_sessions_count DESC) AS users_sessions;""",
+ ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn
+ FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn
+ FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+ {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
+ {query_part}
+ ) AS filtred_sessions
+ ) AS full_sessions
+ GROUP BY user_id
+ ) AS users_sessions;""",
full_args)
else:
+ if data.order is None:
+ data.order = "DESC"
+ sort = 'session_id'
+ if data.sort is not None and data.sort != "session_id":
+ sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
+ else:
+ sort = 'session_id'
+
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
- FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
+ FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
- ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
+ ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""",
full_args)
-
# print("--------------------")
# print(main_query)
# print("--------------------")
- cur.execute(main_query)
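+        # log the generated query and the request payload if execution fails, then re-raise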
+ try:
+ cur.execute(main_query)
+ except Exception as err:
+ print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
+ print(main_query)
+ print("--------- PAYLOAD -----------")
+ print(data.dict())
+ print("--------------------")
+ raise err
+ if errors_only:
+ return helper.list_to_camel_case(cur.fetchall())
- if count_only:
- return helper.dict_to_camel_case(cur.fetchone())
sessions = cur.fetchone()
+ if count_only:
+ return helper.dict_to_camel_case(sessions)
+
total = sessions["count"]
sessions = sessions["sessions"]
- # sessions = []
- # total = cur.rowcount
- # row = cur.fetchone()
- # limit = 200
- # while row is not None and len(sessions) < limit:
- # if row.get("favorite"):
- # limit += 1
- # sessions.append(row)
- # row = cur.fetchone()
- if errors_only:
- return sessions
if data.group_by_user:
for i, s in enumerate(sessions):
sessions[i] = {**s.pop("last_session")[0], **s}
@@ -281,9 +300,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
- full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
- favorite_only=False, issue=None, project_id=project_id,
- user_id=None, extra_event=extra_event)
+ full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
+ favorite_only=False, issue=None, project_id=project_id,
+ user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
@@ -366,6 +385,19 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
return sessions
+def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
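+    # an event is invalid when it has no value (unless the operator is "any" or it is a request/graphql-details event), when a performance event has no source, or when a request/graphql-details event has no filters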
+ return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
+ schemas.EventType.graphql_details] \
+ or event.type in [schemas.PerformanceEventType.location_dom_complete,
+ schemas.PerformanceEventType.location_largest_contentful_paint_time,
+ schemas.PerformanceEventType.location_ttfb,
+ schemas.PerformanceEventType.location_avg_cpu_load,
+ schemas.PerformanceEventType.location_avg_memory_usage
+ ] and (event.source is None or len(event.source) == 0) \
+ or event.type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and (
+ event.filters is None or len(event.filters) == 0))
+
+
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
@@ -375,11 +407,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
"s.duration IS NOT NULL"
]
extra_from = ""
- fav_only_join = ""
- if favorite_only and not errors_only:
- fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
- extra_constraints.append("fs.user_id = %(userId)s")
- full_args["userId"] = user_id
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@@ -586,6 +613,13 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
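+            # pre-count the valid events so the last one can aggregate MAX(main.timestamp) instead of MIN (see the events_query_from build below)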
+ valid_events_count = 0
+ for event in data.events:
+ is_any = _isAny_opreator(event.operator)
+ if not isinstance(event.value, list):
+ event.value = [event.value]
+ if __is_valid_event(is_any=is_any, event=event):
+ valid_events_count += 1
events_query_from = []
event_index = 0
or_events = data.events_order == schemas.SearchEventOrder._or
@@ -596,16 +630,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
- if not is_any and len(event.value) == 0 and event_type not in [schemas.EventType.request_details,
- schemas.EventType.graphql_details] \
- or event_type in [schemas.PerformanceEventType.location_dom_complete,
- schemas.PerformanceEventType.location_largest_contentful_paint_time,
- schemas.PerformanceEventType.location_ttfb,
- schemas.PerformanceEventType.location_avg_cpu_load,
- schemas.PerformanceEventType.location_avg_memory_usage
- ] and (event.source is None or len(event.source) == 0) \
- or event_type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and (
- event.filters is None or len(event.filters) == 0):
+ if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
is_not = False
@@ -617,6 +642,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
"main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s",
"ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"]
+ if favorite_only and not errors_only:
+ event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)"
+ event_where.append("fs.user_id = %(userId)s")
else:
event_from = "%s"
event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s",
@@ -921,7 +949,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
""")
else:
events_query_from.append(f"""\
- (SELECT main.session_id, MIN(main.timestamp) AS timestamp
+ (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp
FROM {event_from}
WHERE {" AND ".join(event_where)}
GROUP BY 1
@@ -935,16 +963,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
MIN(timestamp) AS first_event_ts,
MAX(timestamp) AS last_event_ts
FROM ({events_joiner.join(events_query_from)}) AS u
- GROUP BY 1
- {fav_only_join}"""
+ GROUP BY 1"""
else:
events_query_part = f"""SELECT
event_0.session_id,
MIN(event_0.timestamp) AS first_event_ts,
MAX(event_{event_index - 1}.timestamp) AS last_event_ts
FROM {events_joiner.join(events_query_from)}
- GROUP BY 1
- {fav_only_join}"""
+ GROUP BY 1"""
else:
data.events = []
# ---------------------------------------------------------------------------
@@ -958,24 +984,24 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# elif data.platform == schemas.PlatformType.desktop:
# extra_constraints.append(
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
- if data.order is None:
- data.order = "DESC"
- sort = 'session_id'
- if data.sort is not None and data.sort != "session_id":
- sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
- else:
- sort = 'session_id'
+
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
- if error_status != "ALL":
+ extra_constraints.append("ser.project_id = %(project_id)s")
+ if error_status != schemas.ErrorStatus.all:
extra_constraints.append("ser.status = %(error_status)s")
- full_args["status"] = error_status.lower()
+ full_args["error_status"] = error_status
if favorite_only:
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
- extra_constraints.append("ufe.user_id = %(user_id)s")
+ extra_constraints.append("ufe.user_id = %(userId)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
- if not favorite_only and not errors_only and user_id is not None:
+ if favorite_only and not errors_only and user_id is not None:
+ extra_from += """INNER JOIN (SELECT user_id, session_id
+ FROM public.user_favorite_sessions
+ WHERE user_id = %(userId)s) AS favorite_sessions
+ USING (session_id)"""
+ elif not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
@@ -1003,7 +1029,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
{extra_from}
WHERE
{" AND ".join(extra_constraints)}"""
- return full_args, query_part, sort
+ return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
@@ -1102,48 +1128,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date):
return helper.list_to_camel_case(rows)
-def get_favorite_sessions(project_id, user_id, include_viewed=False):
- with pg_client.PostgresClient() as cur:
- query_part = cur.mogrify(f"""\
- FROM public.sessions AS s
- LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
- WHERE fs.user_id = %(userId)s""",
- {"projectId": project_id, "userId": user_id}
- )
-
- extra_query = b""
- if include_viewed:
- extra_query = cur.mogrify(""",\
- COALESCE((SELECT TRUE
- FROM public.user_viewed_sessions AS fs
- WHERE s.session_id = fs.session_id
- AND fs.user_id = %(userId)s), FALSE) AS viewed""",
- {"projectId": project_id, "userId": user_id})
-
- cur.execute(f"""\
- SELECT s.project_id,
- s.session_id::text AS session_id,
- s.user_uuid,
- s.user_id,
- s.user_os,
- s.user_browser,
- s.user_device,
- s.user_country,
- s.start_ts,
- s.duration,
- s.events_count,
- s.pages_count,
- s.errors_count,
- TRUE AS favorite
- {extra_query.decode('UTF-8')}
- {query_part.decode('UTF-8')}
- ORDER BY s.session_id
- LIMIT 50;""")
-
- sessions = cur.fetchall()
- return helper.list_to_camel_case(sessions)
-
-
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@@ -1196,11 +1180,11 @@ def get_session_user(project_id, user_id):
"public".sessions
WHERE
project_id = %(project_id)s
- AND user_id = %(user_id)s
+ AND user_id = %(userId)s
AND duration is not null
GROUP BY user_id;
""",
- {"project_id": project_id, "user_id": user_id}
+ {"project_id": project_id, "userId": user_id}
)
cur.execute(query=query)
data = cur.fetchone()
@@ -1213,8 +1197,8 @@ def get_session_ids_by_user_ids(project_id, user_ids):
"""\
SELECT session_id FROM public.sessions
WHERE
- project_id = %(project_id)s AND user_id IN %(user_id)s;""",
- {"project_id": project_id, "user_id": tuple(user_ids)}
+ project_id = %(project_id)s AND user_id IN %(userId)s;""",
+ {"project_id": project_id, "userId": tuple(user_ids)}
)
ids = cur.execute(query=query)
return ids
@@ -1240,8 +1224,8 @@ def delete_sessions_by_user_ids(project_id, user_ids):
"""\
DELETE FROM public.sessions
WHERE
- project_id = %(project_id)s AND user_id IN %(user_id)s;""",
- {"project_id": project_id, "user_id": tuple(user_ids)}
+ project_id = %(project_id)s AND user_id IN %(userId)s;""",
+ {"project_id": project_id, "userId": tuple(user_ids)}
)
cur.execute(query=query)
diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py
index 1d342d03f..07aad2ee4 100644
--- a/api/chalicelib/core/sessions_metas.py
+++ b/api/chalicelib/core/sessions_metas.py
@@ -80,32 +80,41 @@ def get_top_key_values(project_id):
return helper.dict_to_CAPITAL_keys(row)
-def __generic_query(typename):
- return f"""\
- SELECT value, type
- FROM ((SELECT value, type
- FROM public.autocomplete
- WHERE
- project_id = %(project_id)s
- AND type ='{typename}'
- AND value ILIKE %(svalue)s
- ORDER BY value
- LIMIT 5)
+def __generic_query(typename, value_length=None):
+ if value_length is None or value_length > 2:
+ return f""" (SELECT DISTINCT value, type
+ FROM public.autocomplete
+ WHERE
+ project_id = %(project_id)s
+ AND type ='{typename}'
+ AND value ILIKE %(svalue)s
+ ORDER BY value
+ LIMIT 5)
UNION
- (SELECT value, type
+ (SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(value)s
ORDER BY value
- LIMIT 5)) AS met"""
+ LIMIT 5);"""
+ return f""" SELECT DISTINCT value, type
+ FROM public.autocomplete
+ WHERE
+ project_id = %(project_id)s
+ AND type ='{typename}'
+ AND value ILIKE %(svalue)s
+ ORDER BY value
+ LIMIT 10;"""
def __generic_autocomplete(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
- query = cur.mogrify(__generic_query(typename),
+ query = cur.mogrify(__generic_query(typename,
+ value_length=len(text) \
+ if SUPPORTED_TYPES[typename].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
@@ -120,124 +129,73 @@ SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
- value_limit=2,
- starts_with="",
- starts_limit=2,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
- value_limit=2,
- starts_with="",
- starts_limit=2,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
- value_limit=5,
- starts_with="/",
- starts_limit=5,
- ignore_if_starts_with=[]),
+ change_by_length=True),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
- value_limit=2,
- starts_with="",
- starts_limit=2,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
- value_limit=2,
- starts_with="",
- starts_limit=2,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
- value_limit=3,
- starts_with="",
- starts_limit=3,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
- value_limit=0,
- starts_with="",
- starts_limit=0,
- ignore_if_starts_with=["/"]),
+ change_by_length=True),
}
@@ -247,6 +205,7 @@ def search(text, meta_type, project_id):
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
- if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
- rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
+ # for IOS events autocomplete
+ # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
+ # rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}
diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index ab242d7e8..2e698dcfd 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
- for i, s in enumerate(stages):
- if i == 0:
- extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
- else:
- extra_from = []
+ i = -1
+ for s in stages:
if s.get("operator") is None:
s["operator"] = "is"
@@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
is_any = sessions._isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
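+ # count only stages that actually produce a sub-query, so the issue window below ends at the last processed stage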
+ i += 1
+ if i == 0:
+ extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
+ else:
+ extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
@@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
- AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
+ AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
) AS issues_t
@@ -526,7 +528,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
split = issue.split('__^__')
issues_dict['significant' if is_sign else 'insignificant'].append({
"type": split[0],
- "title": get_issue_title(split[0]),
+ "title": helper.get_issue_title(split[0]),
"affected_sessions": affected_sessions[issue],
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
"lost_conversions": lost_conversions,
@@ -639,27 +641,3 @@ def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
output['stages'] = stages_list
output['criticalIssuesCount'] = n_critical_issues
return output
-
-
-def get_issue_title(issue_type):
- return {'click_rage': "Click Rage",
- 'dead_click': "Dead Click",
- 'excessive_scrolling': "Excessive Scrolling",
- 'bad_request': "Bad Request",
- 'missing_resource': "Missing Image",
- 'memory': "High Memory Usage",
- 'cpu': "High CPU",
- 'slow_resource': "Slow Resource",
- 'slow_page_load': "Slow Page Performance",
- 'crash': "Crash",
- 'ml_cpu': "High CPU",
- 'ml_memory': "High Memory Usage",
- 'ml_dead_click': "Dead Click",
- 'ml_click_rage': "Click Rage",
- 'ml_mouse_thrashing': "Mouse Thrashing",
- 'ml_excessive_scrolling': "Excessive Scrolling",
- 'ml_slow_resources': "Slow Resource",
- 'custom': "Custom Event",
- 'js_exception': "Error",
- 'custom_event_error': "Custom Error",
- 'js_error': "Error"}.get(issue_type, issue_type)
diff --git a/api/chalicelib/utils/event_filter_definition.py b/api/chalicelib/utils/event_filter_definition.py
index 4c132cb13..b21d49b9c 100644
--- a/api/chalicelib/utils/event_filter_definition.py
+++ b/api/chalicelib/utils/event_filter_definition.py
@@ -6,10 +6,7 @@ class Event:
class SupportedFilter:
- def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with):
+ def __init__(self, get, query, change_by_length):
self.get = get
self.query = query
- self.valueLimit = value_limit
- self.startsWith = starts_with
- self.startsLimit = starts_limit
- self.ignoreIfStartsWith = ignore_if_starts_with
+ self.change_by_length = change_by_length
diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py
index 8e1f5788c..8cfab8a3f 100644
--- a/api/chalicelib/utils/helper.py
+++ b/api/chalicelib/utils/helper.py
@@ -213,11 +213,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
if value is None:
return value
if op == schemas.SearchEventOperator._starts_with:
- return value + '%'
+ return f"{value}%"
elif op == schemas.SearchEventOperator._ends_with:
- return '%' + value
+ return f"%{value}"
elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains:
- return '%' + value + '%'
+ return f"%{value}%"
return value
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index c598d8971..6e4118689 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -5,11 +5,12 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool
-PG_CONFIG = {"host": config("pg_host"),
- "database": config("pg_dbname"),
- "user": config("pg_user"),
- "password": config("pg_password"),
- "port": config("pg_port", cast=int)}
+_PG_CONFIG = {"host": config("pg_host"),
+ "database": config("pg_dbname"),
+ "user": config("pg_user"),
+ "password": config("pg_password"),
+ "port": config("pg_port", cast=int)}
+PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
@@ -63,7 +64,7 @@ class PostgresClient:
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
- self.connection = psycopg2.connect(**PG_CONFIG)
+ self.connection = psycopg2.connect(**_PG_CONFIG)
else:
self.connection = postgreSQL_pool.getconn()
diff --git a/api/routers/core.py b/api/routers/core.py
index 73ae5fc20..97a749429 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -21,13 +21,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
-@app.get('/{projectId}/sessions2/favorite', tags=["sessions"])
-def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
- return {
- 'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True)
- }
-
-
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
@@ -126,14 +119,14 @@ def events_search(projectId: int, q: str,
else:
return {"data": []}
- result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key)
+ result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key)
return result
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
- data = sessions.search2_pg(data, projectId, user_id=context.user_id)
+ data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext =
return {'data': sessions_metas.get_top_key_values(projectId)}
-@app.get('/{projectId}/sessions/filters/search', tags=["sessions"])
-def get_session_filters_meta(projectId: int, q: str, type: str,
- context: schemas.CurrentContext = Depends(OR_context)):
- meta_type = type
- if len(meta_type) == 0:
- return {"data": []}
- if len(q) == 0:
- return {"data": []}
- return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
-
-
@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
@@ -716,7 +698,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
- data=data.dict())
+ data=data)
@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@@ -731,7 +713,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
- data=data.dict())}
+ data=data)}
@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@@ -755,10 +737,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
+ if issue is None:
+ return {"errors": ["issue not found"]}
return {
"data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
- start_date=startDate,
- end_date=endDate),
+ start_date=startDate, end_date=endDate),
"issue": issue}}
@@ -837,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
-def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
- data = assist.get_live_sessions_ws(projectId)
- return {'data': data}
-
-
-@app.post('/{projectId}/assist/sessions', tags=["assist"])
-def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...),
- context: schemas.CurrentContext = Depends(OR_context)):
- data = assist.get_live_sessions_ws(projectId)
+def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
+ data = assist.get_live_sessions_ws(projectId, user_id=userId)
return {'data': data}
@@ -901,13 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors'])
-def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False,
- data: schemas.SearchErrorsSchema = Body(...),
+def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
- if isinstance(favorite, str):
- favorite = True if len(favorite) == 0 else False
- return errors.search(data.dict(), projectId, user_id=context.user_id, status=status,
- favorite_only=favorite)
+ return errors.search(data, projectId, user_id=context.user_id)
@app.get('/{projectId}/errors/stats', tags=['errors'])
diff --git a/api/schemas.py b/api/schemas.py
index cf4ae6cd3..77cb78c05 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -11,6 +11,10 @@ def attribute_to_camel_case(snake_str):
return components[0] + ''.join(x.title() for x in components[1:])
+def transform_email(email: str) -> str:
+ return email.lower() if isinstance(email, str) else email
+
+
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@@ -18,6 +22,7 @@ class _Grecaptcha(BaseModel):
class UserLoginSchema(_Grecaptcha):
email: EmailStr = Field(...)
password: str = Field(...)
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class UserSignupSchema(UserLoginSchema):
@@ -31,17 +36,21 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
- email: Optional[str] = Field(None)
+ email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
+
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha):
- email: str = Field(...)
+ email: EmailStr = Field(...)
+
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserPasswordSchema(BaseModel):
@@ -70,7 +79,9 @@ class CurrentAPIContext(BaseModel):
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
- email: str = Field(...)
+ email: EmailStr = Field(...)
+
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
@@ -83,15 +94,6 @@ class EditSlackSchema(BaseModel):
url: HttpUrl = Field(...)
-class SearchErrorsSchema(BaseModel):
- platform: Optional[str] = Field(None)
- startDate: Optional[int] = Field(TimeUTC.now(-7))
- endDate: Optional[int] = Field(TimeUTC.now())
- density: Optional[int] = Field(7)
- sort: Optional[str] = Field(None)
- order: Optional[str] = Field(None)
-
-
class CreateNotificationSchema(BaseModel):
token: str = Field(...)
notifications: List = Field(...)
@@ -124,15 +126,19 @@ class CreateEditWebhookSchema(BaseModel):
class CreateMemberSchema(BaseModel):
userId: Optional[int] = Field(None)
name: str = Field(...)
- email: str = Field(...)
+ email: EmailStr = Field(...)
admin: bool = Field(False)
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
+
class EditMemberSchema(BaseModel):
name: str = Field(...)
- email: str = Field(...)
+ email: EmailStr = Field(...)
admin: bool = Field(False)
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
+
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
@@ -253,6 +259,8 @@ class EmailPayloadSchema(BaseModel):
link: str = Field(...)
message: str = Field(...)
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
+
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
@@ -261,6 +269,8 @@ class MemberInvitationPayloadSchema(BaseModel):
client_id: str = Field(...)
sender_name: str = Field(...)
+ _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
+
class Config:
alias_generator = attribute_to_camel_case
@@ -609,11 +619,12 @@ class SessionsSearchPayloadSchema(BaseModel):
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
- order: str = Field(default="DESC")
+ order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
+ bookmarked: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
@@ -662,6 +673,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
+ rangeValue: Optional[str] = Field(None)
@root_validator(pre=True)
def enforce_default_values(cls, values):
@@ -694,6 +706,27 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
+ rangeValue: Optional[str] = Field(None)
+
+
+class ErrorStatus(str, Enum):
+ all = 'all'
+ unresolved = 'unresolved'
+ resolved = 'resolved'
+ ignored = 'ignored'
+
+
+class ErrorSort(str, Enum):
+ occurrence = 'occurrence'
+ users_count = 'users'
+ sessions_count = 'sessions'
+
+
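+# errors search now reuses the sessions search payload, extended with error-specific sort, status and free-text query fields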
+class SearchErrorsSchema(SessionsSearchPayloadSchema):
+ sort: ErrorSort = Field(default=ErrorSort.occurrence)
+ density: Optional[int] = Field(7)
+ status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)
+ query: Optional[str] = Field(default=None)
class MetricPayloadSchema(BaseModel):
diff --git a/backend/pkg/url/assets/url.go b/backend/pkg/url/assets/url.go
index b087878b9..1fe717531 100644
--- a/backend/pkg/url/assets/url.go
+++ b/backend/pkg/url/assets/url.go
@@ -9,16 +9,17 @@ import (
func getSessionKey(sessionID uint64) string {
// Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID
- return strconv.FormatUint(sessionID>>50, 10)
+ return strconv.FormatUint(sessionID>>50, 10)
}
func ResolveURL(baseurl string, rawurl string) string {
+ rawurl = strings.Trim(rawurl, " ")
if !isRelativeCachable(rawurl) {
return rawurl
}
base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls
- u, _ := url.Parse(rawurl) // TODO: handle errors ?
- if base == nil || u == nil {
+ u, _ := url.Parse(rawurl) // TODO: handle errors ?
+ if base == nil || u == nil {
return rawurl
}
return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl)
@@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string {
}
func GetCachePathForAssets(sessionID uint64, rawurl string) string {
- return getCachePathWithKey(sessionID, rawurl)
+ return getCachePathWithKey(sessionID, rawurl)
}
-
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string {
fullURL, cachable := GetFullCachableURL(baseURL, relativeURL)
if !cachable {
return fullURL
}
- u := url.URL{
- Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
- Host: r.assetsURL.Host,
- Scheme: r.assetsURL.Scheme,
+ u := url.URL{
+ Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
+ Host: r.assetsURL.Host,
+ Scheme: r.assetsURL.Scheme,
}
return u.String()
}
-
diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
index 04efdbb32..8531d89a3 100644
--- a/ee/api/chalicelib/core/errors.py
+++ b/ee/api/chalicelib/core/errors.py
@@ -1,8 +1,9 @@
import json
+import schemas
from chalicelib.core import dashboard
from chalicelib.core import sourcemaps, sessions
-from chalicelib.utils import ch_client
+from chalicelib.utils import ch_client, metrics_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data):
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE pe.error_id = fe.error_id
- AND fe.user_id = %(user_id)s), FALSE) AS favorite,
+ AND fe.user_id = %(userId)s), FALSE) AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
@@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data):
AND error_id = %(error_id)s
ORDER BY start_ts DESC
LIMIT 1;""",
- {"project_id": project_id, "error_id": error_id, "user_id": user_id})
+ {"project_id": project_id, "error_id": error_id, "userId": user_id})
cur.execute(query=query)
status = cur.fetchone()
@@ -423,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if time_constraint:
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
- if platform == 'mobile':
+ if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
- elif platform == 'desktop':
+ elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@@ -437,60 +438,280 @@ def __get_step_size(startTimestamp, endTimestamp, density):
def __get_sort_key(key):
return {
- "datetime": "max_datetime",
- "lastOccurrence": "max_datetime",
- "firstOccurrence": "min_datetime"
+ schemas.ErrorSort.occurrence: "max_datetime",
+ schemas.ErrorSort.users_count: "users",
+ schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
-def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
- status = status.upper()
- if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
- return {"errors": ["invalid error status"]}
- ch_sub_query = __get_basic_constraints(data.get('platform'))
- ch_sub_query.append("source ='js_exception'")
+def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate",
+ endTime_arg_name="endDate", chart=False, step_size_name="step_size",
+ project_key="project_id"):
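+ # PG counterpart of __get_basic_constraints: builds the WHERE clauses on millisecond epoch timestamps instead of ClickHouse datetimes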
+ if project_key is None:
+ ch_sub_query = []
+ else:
+ ch_sub_query = [f"{project_key} =%(project_id)s"]
+ if time_constraint:
+ ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
+ f"timestamp < %({endTime_arg_name})s"]
+ if chart:
+ ch_sub_query += [f"timestamp >= generated_timestamp",
+ f"timestamp < generated_timestamp + %({step_size_name})s"]
+ if platform == schemas.PlatformType.mobile:
+ ch_sub_query.append("user_device_type = 'mobile'")
+ elif platform == schemas.PlatformType.desktop:
+ ch_sub_query.append("user_device_type = 'desktop'")
+ return ch_sub_query
+
+
+def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
+ empty_response = {"data": {
+ 'total': 0,
+ 'errors': []
+ }}
+
+ platform = None
+ for f in data.filters:
+ if f.type == schemas.FilterType.platform and len(f.value) > 0:
+ platform = f.value[0]
+ pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id")
+ pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
+ "pe.project_id=%(project_id)s"]
+ pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None)
+ # pg_sub_query_chart.append("source ='js_exception'")
+ pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
- if data.get("startDate") is None:
- data["startDate"] = TimeUTC.now(-30)
- if data.get("endDate") is None:
- data["endDate"] = TimeUTC.now(1)
- if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
+ if data.startDate is None:
+ data.startDate = TimeUTC.now(-30)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now(1)
+ if len(data.events) > 0 or len(data.filters) > 0:
+ print("-- searching for sessions before errors")
+ # if favorite_only=True, search for sessions associated with favorite errors
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
- error_status=status, favorite_only=favorite_only)
- error_ids = [e["error_id"] for e in statuses]
+ error_status=data.status)
if len(statuses) == 0:
- return {"data": {
- 'total': 0,
- 'errors': []
- }}
- with ch_client.ClickHouseClient() as ch:
- if data.get("startDate") is None:
- data["startDate"] = TimeUTC.now(-7)
- if data.get("endDate") is None:
- data["endDate"] = TimeUTC.now()
- density = data.get("density", 7)
- step_size = __get_step_size(data["startDate"], data["endDate"], density)
+ return empty_response
+ error_ids = [e["errorId"] for e in statuses]
+ with pg_client.PostgresClient() as cur:
+ if data.startDate is None:
+ data.startDate = TimeUTC.now(-7)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now()
+ step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
- if data.get("sort") is not None:
- sort = __get_sort_key(data["sort"])
+ if data.sort is not None:
+ sort = __get_sort_key(data.sort)
order = "DESC"
- if data.get("order") is not None:
- order = data["order"]
+ if data.order is not None:
+ order = data.order
+ extra_join = ""
params = {
- "startDate": data['startDate'],
- "endDate": data['endDate'],
+ "startDate": data.startDate,
+ "endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
+ if data.status != schemas.ErrorStatus.all:
+ pg_sub_query.append("status = %(error_status)s")
+ params["error_status"] = data.status
+ if data.limit is not None and data.page is not None:
+ params["errors_offset"] = (data.page - 1) * data.limit
+ params["errors_limit"] = data.limit
+ else:
+ params["errors_offset"] = 0
+ params["errors_limit"] = 200
+
+ if error_ids is not None:
+ params["error_ids"] = tuple(error_ids)
+ pg_sub_query.append("error_id IN %(error_ids)s")
+ if data.bookmarked:
+ pg_sub_query.append("ufe.user_id = %(userId)s")
+ extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
+ if data.query is not None and len(data.query) > 0:
+ pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
+ params["error_query"] = helper.values_for_operator(value=data.query,
+ op=schemas.SearchEventOperator._contains)
+
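+ # main query: paginated error aggregates, with LATERAL joins adding first/last occurrence and a per-interval chart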
+ main_pg_query = f"""SELECT full_count,
+ error_id,
+ name,
+ message,
+ users,
+ sessions,
+ last_occurrence,
+ first_occurrence,
+ chart
+ FROM (SELECT COUNT(details) OVER () AS full_count, details.*
+ FROM (SELECT error_id,
+ name,
+ message,
+ COUNT(DISTINCT user_uuid) AS users,
+ COUNT(DISTINCT session_id) AS sessions,
+ MAX(timestamp) AS max_datetime,
+ MIN(timestamp) AS min_datetime
+ FROM events.errors
+ INNER JOIN public.errors AS pe USING (error_id)
+ INNER JOIN public.sessions USING (session_id)
+ {extra_join}
+ WHERE {" AND ".join(pg_sub_query)}
+ GROUP BY error_id, name, message
+ ORDER BY {sort} {order}) AS details
+ LIMIT %(errors_limit)s OFFSET %(errors_offset)s
+ ) AS details
+ INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
+ MIN(timestamp) AS first_occurrence
+ FROM events.errors
+ WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
+ INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
+ FROM (SELECT generated_timestamp AS timestamp,
+ COUNT(session_id) AS count
+ FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
+ LEFT JOIN LATERAL (SELECT DISTINCT session_id
+ FROM events.errors
+ WHERE {" AND ".join(pg_sub_query_chart)}
+ ) AS sessions ON (TRUE)
+ GROUP BY timestamp
+ ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
+
+ # print("--------------------")
+ # print(cur.mogrify(main_pg_query, params))
+ # print("--------------------")
+
+ cur.execute(cur.mogrify(main_pg_query, params))
+ rows = cur.fetchall()
+ total = 0 if len(rows) == 0 else rows[0]["full_count"]
+ if flows:
+ return {"data": {"count": total}}
+
+ if total == 0:
+ rows = []
+ else:
+ if len(statuses) == 0:
+ query = cur.mogrify(
+ """SELECT error_id, status, parent_error_id, payload,
+ COALESCE((SELECT TRUE
+ FROM public.user_favorite_errors AS fe
+ WHERE errors.error_id = fe.error_id
+ AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
+ COALESCE((SELECT TRUE
+ FROM public.user_viewed_errors AS ve
+ WHERE errors.error_id = ve.error_id
+ AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
+ FROM public.errors
+ WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
+ {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
+ "user_id": user_id})
+ cur.execute(query=query)
+ statuses = helper.list_to_camel_case(cur.fetchall())
+ statuses = {
+ s["errorId"]: s for s in statuses
+ }
+
+ for r in rows:
+ r.pop("full_count")
+ if r["error_id"] in statuses:
+ r["status"] = statuses[r["error_id"]]["status"]
+ r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
+ r["favorite"] = statuses[r["error_id"]]["favorite"]
+ r["viewed"] = statuses[r["error_id"]]["viewed"]
+ r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
+ else:
+ r["status"] = "untracked"
+ r["parent_error_id"] = None
+ r["favorite"] = False
+ r["viewed"] = False
+ r["stack"] = None
+
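+ # drop "Script error." rows whose only stack frame has no source path, and adjust the reported total accordingly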
+ offset = len(rows)
+ rows = [r for r in rows if r["stack"] is None
+ or (len(r["stack"]) == 0 or len(r["stack"]) > 1
+ or len(r["stack"]) > 0
+ and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
+ offset -= len(rows)
+ return {
+ "data": {
+ 'total': total - offset,
+ 'errors': helper.list_to_camel_case(rows)
+ }
+ }
+
+
+# TODO: refactor this function after the ClickHouse structure changes (search by query is still missing)
+def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
+ empty_response = {"data": {
+ 'total': 0,
+ 'errors': []
+ }}
+ platform = None
+ for f in data.filters:
+ if f.type == schemas.FilterType.platform and len(f.value) > 0:
+ platform = f.value[0]
+ ch_sub_query = __get_basic_constraints(platform)
+ ch_sub_query.append("source ='js_exception'")
+ statuses = []
+ error_ids = None
+ # ClickHouse keeps data for the past month only, so no need to search beyond that
+ if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
+ data.startDate = TimeUTC.now(-30)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now(1)
+ if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
+ print("-- searching for sessions before errors")
+ # if favorite_only=True, search for sessions associated with favorite errors
+ statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
+ error_status=data.status)
+ if len(statuses) == 0:
+ return empty_response
+ error_ids = [e["errorId"] for e in statuses]
+ with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
+ if data.startDate is None:
+ data.startDate = TimeUTC.now(-7)
+ if data.endDate is None:
+ data.endDate = TimeUTC.now()
+ step_size = __get_step_size(data.startDate, data.endDate, data.density)
+ sort = __get_sort_key('datetime')
+ if data.sort is not None:
+ sort = __get_sort_key(data.sort)
+ order = "DESC"
+ if data.order is not None:
+ order = data.order
+ params = {
+ "startDate": data.startDate,
+ "endDate": data.endDate,
+ "project_id": project_id,
+ "userId": user_id,
+ "step_size": step_size}
+ if data.limit is not None and data.page is not None:
+ params["errors_offset"] = (data.page - 1) * data.limit
+ params["errors_limit"] = data.limit
+ else:
+ params["errors_offset"] = 0
+ params["errors_limit"] = 200
+ if data.bookmarked:
+ cur.execute(cur.mogrify(f"""SELECT error_id
+ FROM public.user_favorite_errors
+ WHERE user_id = %(userId)s
+ {"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
+ {"userId": user_id, "error_ids": tuple(error_ids or [])}))
+ error_ids = cur.fetchall()
+ if len(error_ids) == 0:
+ return empty_response
+ error_ids = [e["error_id"] for e in error_ids]
+
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
ch_sub_query.append("error_id IN %(error_ids)s")
+
main_ch_query = f"""\
SELECT COUNT(DISTINCT error_id) AS count
FROM errors
WHERE {" AND ".join(ch_sub_query)};"""
+ # print("------------")
+ # print(ch.client().substitute_params(main_ch_query, params))
+ # print("------------")
total = ch.execute(query=main_ch_query, params=params)[0]["count"]
if flows:
return {"data": {"count": total}}
@@ -510,9 +731,10 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
- LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
- FROM errors
- GROUP BY error_id) AS time_details
+ LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
+ INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
+ FROM errors
+ GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
@@ -523,35 +745,36 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
- # print("--------------------")
- # print(main_ch_query % params)
+ # print("------------")
+ # print(ch.client().substitute_params(main_ch_query, params))
+ # print("------------")
+
rows = ch.execute(query=main_ch_query, params=params)
if len(statuses) == 0:
- with pg_client.PostgresClient() as cur:
- query = cur.mogrify(
- """SELECT error_id, status, parent_error_id, payload,
- COALESCE((SELECT TRUE
- FROM public.user_favorite_errors AS fe
- WHERE errors.error_id = fe.error_id
- AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
- COALESCE((SELECT TRUE
- FROM public.user_viewed_errors AS ve
- WHERE errors.error_id = ve.error_id
- AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
- FROM public.errors
- WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
- {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
- "user_id": user_id})
- cur.execute(query=query)
- statuses = cur.fetchall()
+ query = cur.mogrify(
+ """SELECT error_id, status, parent_error_id, payload,
+ COALESCE((SELECT TRUE
+ FROM public.user_favorite_errors AS fe
+ WHERE errors.error_id = fe.error_id
+ AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
+ COALESCE((SELECT TRUE
+ FROM public.user_viewed_errors AS ve
+ WHERE errors.error_id = ve.error_id
+ AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
+ FROM public.errors
+ WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
+ {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
+ "userId": user_id})
+ cur.execute(query=query)
+ statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
- s["error_id"]: s for s in statuses
+ s["errorId"]: s for s in statuses
}
for r in rows:
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
- r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
+ r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@@ -565,9 +788,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
r["chart"] = list(r["chart"])
for i in range(len(r["chart"])):
r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
- r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"],
- end_time=data["endDate"],
- density=density, neutral={"count": 0})
+ r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
+ end_time=data.endDate,
+ density=data.density, neutral={"count": 0})
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
@@ -593,7 +816,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
- error = get(error_id=error_id)
+ error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":
@@ -766,7 +989,7 @@ def format_first_stack_frame(error):
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
- """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
+ """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s)
SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
FROM (SELECT root_error.error_id
FROM events.errors
@@ -780,7 +1003,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim
AND user_viewed.error_id ISNULL
LIMIT 1
) AS timed_errors;""",
- {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
+ {"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp})
cur.execute(query=query)
row = cur.fetchone()
diff --git a/ee/api/chalicelib/core/resources.py b/ee/api/chalicelib/core/resources.py
index 557135804..332d3709a 100644
--- a/ee/api/chalicelib/core/resources.py
+++ b/ee/api/chalicelib/core/resources.py
@@ -7,7 +7,7 @@ def get_by_session_id(session_id):
with ch_client.ClickHouseClient() as ch:
ch_query = """\
SELECT
- datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success
+ datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s);"""
params = {"session_id": session_id}
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index cf5e7378f..52fc737e4 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -41,6 +41,11 @@ def login(data: schemas.UserLoginSchema = Body(...)):
status_code=status.HTTP_401_UNAUTHORIZED,
detail="You’ve entered invalid Email or Password."
)
+ if "errors" in r:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=r["errors"][0]
+ )
tenant_id = r.pop("tenantId")
diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go
index 1483c2ccf..ca0544923 100644
--- a/ee/backend/pkg/kafka/consumer.go
+++ b/ee/backend/pkg/kafka/consumer.go
@@ -9,9 +9,9 @@ import (
"github.com/pkg/errors"
+ "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/queue/types"
- "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
)
type Message = kafka.Message
@@ -19,7 +19,7 @@ type Message = kafka.Message
type Consumer struct {
c *kafka.Consumer
messageHandler types.MessageHandler
- commitTicker *time.Ticker
+ commitTicker *time.Ticker
pollTimeout uint
lastKafkaEventTs int64
@@ -56,7 +56,7 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand
return &Consumer{
c: c,
messageHandler: messageHandler,
- commitTicker: time.NewTicker(2 * time.Minute),
+ commitTicker: time.NewTicker(2 * time.Minute),
pollTimeout: 200,
}
}
@@ -65,13 +65,12 @@ func (consumer *Consumer) DisableAutoCommit() {
consumer.commitTicker.Stop()
}
-
func (consumer *Consumer) Commit() error {
consumer.c.Commit() // TODO: return error if it is not "No offset stored"
return nil
}
-func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
+func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
assigned, err := consumer.c.Assignment()
if err != nil {
return err
@@ -84,37 +83,38 @@ func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
timestamps = append(timestamps, p)
}
offsets, err := consumer.c.OffsetsForTimes(timestamps, 2000)
- if err != nil {
+ if err != nil {
return errors.Wrap(err, "Kafka Consumer back commit error")
}
// Limiting to already committed
committed, err := consumer.c.Committed(assigned, 2000) // memorise?
- logPartitions("Actually committed:",committed)
+ logPartitions("Actually committed:", committed)
if err != nil {
return errors.Wrap(err, "Kafka Consumer retrieving committed error")
}
for _, offs := range offsets {
for _, comm := range committed {
- if comm.Offset == kafka.OffsetStored ||
+ if comm.Offset == kafka.OffsetStored ||
comm.Offset == kafka.OffsetInvalid ||
- comm.Offset == kafka.OffsetBeginning ||
- comm.Offset == kafka.OffsetEnd { continue }
- if comm.Partition == offs.Partition &&
+ comm.Offset == kafka.OffsetBeginning ||
+ comm.Offset == kafka.OffsetEnd {
+ continue
+ }
+ if comm.Partition == offs.Partition &&
(comm.Topic != nil && offs.Topic != nil && *comm.Topic == *offs.Topic) &&
- comm.Offset > offs.Offset {
+ comm.Offset > offs.Offset {
offs.Offset = comm.Offset
}
}
}
- // TODO: check per-partition errors: offsets[i].Error
+ // TODO: check per-partition errors: offsets[i].Error
_, err = consumer.c.CommitOffsets(offsets)
return errors.Wrap(err, "Kafka Consumer back commit error")
}
-
-func (consumer *Consumer) CommitBack(gap int64) error {
+func (consumer *Consumer) CommitBack(gap int64) error {
if consumer.lastKafkaEventTs == 0 {
return nil
}
@@ -135,31 +135,31 @@ func (consumer *Consumer) ConsumeNext() error {
}
switch e := ev.(type) {
- case *kafka.Message:
- if e.TopicPartition.Error != nil {
- return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
- }
- ts := e.Timestamp.UnixNano()/ 1e6
- consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
- Topic: *(e.TopicPartition.Topic),
- ID: uint64(e.TopicPartition.Offset),
- Timestamp: ts,
- })
- consumer.lastKafkaEventTs = ts
- // case kafka.AssignedPartitions:
- // logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
- // consumer.partitions = e.Partitions
- // consumer.c.Assign(e.Partitions)
- // log.Printf("Actually partitions assigned!")
- // case kafka.RevokedPartitions:
- // log.Println("Kafka Cosumer: Partitions Revoked")
- // consumer.partitions = nil
- // consumer.c.Unassign()
- case kafka.Error:
- if e.Code() == kafka.ErrAllBrokersDown {
- os.Exit(1)
- }
- log.Printf("Consumer error: %v\n", e)
+ case *kafka.Message:
+ if e.TopicPartition.Error != nil {
+ return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
+ }
+ ts := e.Timestamp.UnixNano() / 1e6
+ consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
+ Topic: *(e.TopicPartition.Topic),
+ ID: uint64(e.TopicPartition.Offset),
+ Timestamp: ts,
+ })
+ consumer.lastKafkaEventTs = ts
+ // case kafka.AssignedPartitions:
+ // logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
+ // consumer.partitions = e.Partitions
+ // consumer.c.Assign(e.Partitions)
+ // log.Printf("Actually partitions assigned!")
+ // case kafka.RevokedPartitions:
+ // log.Println("Kafka Cosumer: Partitions Revoked")
+ // consumer.partitions = nil
+ // consumer.c.Unassign()
+ case kafka.Error:
+ if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded {
+ os.Exit(1)
+ }
+ log.Printf("Consumer error: %v\n", e)
}
return nil
}
@@ -173,8 +173,6 @@ func (consumer *Consumer) Close() {
}
}
-
-
// func (consumer *Consumer) consume(
// message func(m *kafka.Message) error,
// commit func(c *kafka.Consumer) error,
@@ -230,7 +228,6 @@ func (consumer *Consumer) Close() {
// }
// }
-
// func (consumer *Consumer) Consume(
// message func(key uint64, value []byte) error,
// ) error {
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql
new file mode 100644
index 000000000..1a640b4be
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.4/1.5.4.sql
@@ -0,0 +1,91 @@
+\set ON_ERROR_STOP true
+SET client_min_messages TO NOTICE;
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.5.4-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+
+-- to detect duplicate users and delete them if possible
+DO
+$$
+ DECLARE
+ duplicate RECORD;
+ BEGIN
+ IF EXISTS(SELECT user_id
+ FROM users
+ WHERE lower(email) =
+ (SELECT LOWER(email)
+ FROM users AS su
+ WHERE LOWER(su.email) = LOWER(users.email)
+ AND su.user_id != users.user_id
+ LIMIT 1)
+ ORDER BY LOWER(email)) THEN
+ raise notice 'duplicate users detected';
+ FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat
+ FROM users
+ WHERE lower(email) =
+ (SELECT LOWER(email)
+ FROM users AS su
+ WHERE LOWER(su.email) = LOWER(users.email)
+ AND su.user_id != users.user_id
+ LIMIT 1)
+ ORDER BY LOWER(email)
+ LOOP
+ IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
+ raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
+ DELETE FROM users WHERE user_id = duplicate.user_id;
+ END IF;
+ END LOOP;
+ IF EXISTS(SELECT user_id
+ FROM users
+ WHERE lower(email) =
+ (SELECT LOWER(email)
+ FROM users AS su
+ WHERE LOWER(su.email) = LOWER(users.email)
+ AND su.user_id != users.user_id
+ LIMIT 1)
+ ORDER BY LOWER(email)) THEN
+ raise notice 'remaining duplicates, please fix (delete) before finishing update';
+ FOR duplicate IN SELECT user_id, email
+ FROM users
+ WHERE lower(email) =
+ (SELECT LOWER(email)
+ FROM users AS su
+ WHERE LOWER(su.email) = LOWER(users.email)
+ AND su.user_id != users.user_id
+ LIMIT 1)
+ ORDER BY LOWER(email)
+ LOOP
+ raise notice 'user: % %',duplicate.user_id,duplicate.email;
+ END LOOP;
+ RAISE 'Duplicate users' USING ERRCODE = '42710';
+ END IF;
+ END IF;
+ END;
+$$
+LANGUAGE plpgsql;
+
+UPDATE users
+SET email=LOWER(email);
+
+DROP INDEX IF EXISTS autocomplete_value_gin_idx;
+COMMIT;
+
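+-- replace the dropped global trigram index with per-type partial GIN indexes so each autocomplete type only scans its own rows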
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
+CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 633e64caa..9adab50e0 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
-SELECT 'v1.5.3-ee'
+SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
@@ -721,7 +721,22 @@ $$
CREATE unique index IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
- CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
+
+ CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
+ CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
+ CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
+ CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
+ CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
+ CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
+ CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
+ CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
+ CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
+ CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
+ CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
+ CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
+ CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
+ CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
+ CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
BEGIN
IF NOT EXISTS(SELECT *
@@ -1018,7 +1033,7 @@ $$
CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
-
+
CREATE TABLE IF NOT EXISTS events.state_actions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
diff --git a/ee/utilities/server.js b/ee/utilities/server.js
index f1209c9ff..d049faa19 100644
--- a/ee/utilities/server.js
+++ b/ee/utilities/server.js
@@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers
var express = require('express');
const {ExpressPeerServer} = require('peer');
var socket;
-if (process.env.cluster === "true") {
+if (process.env.redis === "true") {
console.log("Using Redis");
socket = require("./servers/websocket-cluster");
} else {
diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/utilities/servers/websocket-cluster.js
index 940f83879..c044043a5 100644
--- a/ee/utilities/servers/websocket-cluster.js
+++ b/ee/utilities/servers/websocket-cluster.js
@@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader;
const {extractPeerId} = require('./peerjs-server');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
-
-var wsRouter = express.Router();
+const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
-// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
+const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
+const pubClient = createClient({url: REDIS_URL});
+const subClient = pubClient.duplicate();
let io;
const debug = process.env.debug === "1" || false;
-const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
-const pubClient = createClient({url: REDIS_URL});
-const subClient = pubClient.duplicate();
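+// Build the Socket.IO server: attach to the Express/HTTP server by default, or create a
+// uWebSockets.js-backed server when uws === "true"; "prefix" is prepended to the /socket path.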
+const createSocketIOServer = function (server, prefix) {
+ if (process.env.uws !== "true") {
+ io = _io(server, {
+ maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+ cors: {
+ origin: "*",
+ methods: ["GET", "POST", "PUT"]
+ },
+ path: (prefix ? prefix : '') + '/socket'
+ });
+ } else {
+ io = new _io.Server({
+ maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+ cors: {
+ origin: "*",
+ methods: ["GET", "POST", "PUT"]
+ },
+ path: (prefix ? prefix : '') + '/socket'
+ // transports: ['websocket'],
+ // upgrade: false
+ });
+ io.attachApp(server);
+ }
+}
const uniqueSessions = function (data) {
let resArr = [];
@@ -36,18 +58,40 @@ const uniqueSessions = function (data) {
return resArr;
}
-const socketsList = async function (req, res) {
- debug && console.log("[WS]looking for all available sessions");
- let liveSessions = {};
- let rooms = await io.of('/').adapter.allRooms();
- for (let peerId of rooms) {
- let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey !== undefined) {
- liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(sessionId);
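+// Optional userId query parameter, read via req.getQuery() under uWebSockets.js and req.query otherwise.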
+const extractUserIdFromRequest = function (req) {
+ if (process.env.uws === "true") {
+ if (req.getQuery("userId")) {
+ debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
+ return req.getQuery("userId");
}
+ } else if (req.query.userId) {
+ debug && console.log(`[WS]where userId=${req.query.userId}`);
+ return req.query.userId;
}
- let result = {"data": liveSessions};
+ return undefined;
+}
+
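+// projectKey route parameter, read via req.getParameter(0) under uWebSockets.js and req.params otherwise.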
+const extractProjectKeyFromRequest = function (req) {
+ if (process.env.uws === "true") {
+ if (req.getParameter(0)) {
+ debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
+ return req.getParameter(0);
+ }
+ } else if (req.params.projectKey) {
+ debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
+ return req.params.projectKey;
+ }
+ return undefined;
+}
+
+
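+// The Redis adapter's allRooms() aggregates rooms from every node of the cluster.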
+const getAvailableRooms = async function () {
+ let rooms = await io.of('/').adapter.allRooms();
+ return rooms;
+}
+
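+// Serialize data as JSON, using the uWebSockets.js response API when enabled and the Node response API otherwise.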
+const respond = function (res, data) {
+ let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@@ -56,37 +100,64 @@ const socketsList = async function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
+
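+// GET sockets-list: live session ids grouped by projectKey; with a userId query parameter,
+// only sessions whose handshake sessionInfo.userID matches are included.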
+const socketsList = async function (req, res) {
+ debug && console.log("[WS]looking for all available sessions");
+ let userId = extractUserIdFromRequest(req);
+
+ let liveSessions = {};
+ let rooms = await getAvailableRooms();
+ for (let peerId of rooms) {
+ let {projectKey, sessionId} = extractPeerId(peerId);
+ if (projectKey !== undefined) {
+ liveSessions[projectKey] = liveSessions[projectKey] || [];
+ if (userId) {
+ const connected_sockets = await io.in(peerId).fetchSockets();
+ for (let item of connected_sockets) {
+ if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
+ } else {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
+ }
+ respond(res, liveSessions);
+}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
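+// GET sockets-list/:projectKey: the same listing restricted to a single project.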
const socketsListByProject = async function (req, res) {
- if (process.env.uws === "true") {
- req.params = {projectKey: req.getParameter(0)};
- }
- debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
+ debug && console.log("[WS]looking for available sessions");
+ let _projectKey = extractProjectKeyFromRequest(req);
+ let userId = extractUserIdFromRequest(req);
let liveSessions = {};
- let rooms = await io.of('/').adapter.allRooms();
+ let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey === req.params.projectKey) {
+ if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(sessionId);
+ if (userId) {
+ const connected_sockets = await io.in(peerId).fetchSockets();
+ for (let item of connected_sockets) {
+ if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
+ } else {
+ liveSessions[projectKey].push(sessionId);
+ }
}
}
- let result = {"data": liveSessions[req.params.projectKey] || []};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
- }
+ respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
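+// GET sockets-live: like sockets-list, but returns the full sessionInfo objects, deduplicated via uniqueSessions().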
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
+ let userId = extractUserIdFromRequest(req);
let liveSessions = {};
- let rooms = await io.of('/').adapter.allRooms();
+ let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
@@ -94,51 +165,48 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ if (userId) {
+ if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
+ } else {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
}
}
- liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
+            liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
}
}
- let result = {"data": liveSessions};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
- }
+ respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
- if (process.env.uws === "true") {
- req.params = {projectKey: req.getParameter(0)};
- }
- debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
+ debug && console.log("[WS]looking for available LIVE sessions");
+ let _projectKey = extractProjectKeyFromRequest(req);
+ let userId = extractUserIdFromRequest(req);
let liveSessions = {};
- let rooms = await io.of('/').adapter.allRooms();
+ let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey === req.params.projectKey) {
+ if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ if (userId) {
+ if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
+ } else {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
}
}
- liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
+ liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]);
}
}
- let result = {"data": liveSessions[req.params.projectKey] || []};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
- }
+ respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@@ -219,35 +287,13 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
- start: (server) => {
- if (process.env.uws !== "true") {
- io = _io(server, {
- maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
- cors: {
- origin: "*",
- methods: ["GET", "POST", "PUT"]
- },
- path: '/socket'
- });
- } else {
- io = new _io.Server({
- maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
- cors: {
- origin: "*",
- methods: ["GET", "POST", "PUT"]
- },
- path: '/socket',
- // transports: ['websocket'],
- // upgrade: false
- });
- io.attachApp(server);
- }
-
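+    // start() now receives an optional path prefix, forwarded to createSocketIOServer.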
+ start: (server, prefix) => {
+ createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
socket.identity = socket.handshake.query.identity;
- let {projectKey, sessionId} = extractPeerId(socket.peerId);
+ const {projectKey, sessionId} = extractPeerId(socket.peerId);
socket.sessionId = sessionId;
socket.projectKey = projectKey;
socket.lastMessageReceivedAt = Date.now();
diff --git a/ee/utilities/servers/websocket.js b/ee/utilities/servers/websocket.js
index e087dba31..0bd397d96 100644
--- a/ee/utilities/servers/websocket.js
+++ b/ee/utilities/servers/websocket.js
@@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
-var {extractPeerId} = require('./peerjs-server');
-var wsRouter = express.Router();
+const {extractPeerId} = require('./peerjs-server');
+const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
-// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
-let debug = process.env.debug === "1" || false;
+const debug = process.env.debug === "1" || false;
-const socketsList = function (req, res) {
- debug && console.log("[WS]looking for all available sessions");
- let liveSessions = {};
- for (let peerId of io.sockets.adapter.rooms.keys()) {
- let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey !== undefined) {
- liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(sessionId);
- }
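+// Same bootstrap as the cluster variant: uWebSockets.js server when uws === "true",
+// otherwise attached to the Express/HTTP server, with an optional path prefix.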
+const createSocketIOServer = function (server, prefix) {
+ if (process.env.uws !== "true") {
+ io = _io(server, {
+ maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+ cors: {
+ origin: "*",
+ methods: ["GET", "POST", "PUT"]
+ },
+ path: (prefix ? prefix : '') + '/socket'
+ });
+ } else {
+ io = new _io.Server({
+ maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
+ cors: {
+ origin: "*",
+ methods: ["GET", "POST", "PUT"]
+ },
+ path: (prefix ? prefix : '') + '/socket'
+ // transports: ['websocket'],
+ // upgrade: false
+ });
+ io.attachApp(server);
}
- let result = {"data": liveSessions};
+}
+
+const extractUserIdFromRequest = function (req) {
+ if (process.env.uws === "true") {
+ if (req.getQuery("userId")) {
+ debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
+ return req.getQuery("userId");
+ }
+ } else if (req.query.userId) {
+ debug && console.log(`[WS]where userId=${req.query.userId}`);
+ return req.query.userId;
+ }
+ return undefined;
+}
+
+const extractProjectKeyFromRequest = function (req) {
+ if (process.env.uws === "true") {
+ if (req.getParameter(0)) {
+ debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
+ return req.getParameter(0);
+ }
+ } else if (req.params.projectKey) {
+ debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
+ return req.params.projectKey;
+ }
+ return undefined;
+}
+
+
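+// Rooms come from the local in-memory adapter here; the cluster variant queries the Redis adapter instead.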
+const getAvailableRooms = async function () {
+ return io.sockets.adapter.rooms.keys();
+}
+
+const respond = function (res, data) {
+ let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@@ -36,84 +82,111 @@ const socketsList = function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
-wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
-const socketsListByProject = function (req, res) {
- if (process.env.uws === "true") {
- req.params = {projectKey: req.getParameter(0)};
- }
- debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
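+// GET sockets-list: live session ids grouped by projectKey, optionally filtered by the userId query parameter.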
+const socketsList = async function (req, res) {
+ debug && console.log("[WS]looking for all available sessions");
+ let userId = extractUserIdFromRequest(req);
+
let liveSessions = {};
- for (let peerId of io.sockets.adapter.rooms.keys()) {
+ let rooms = await getAvailableRooms();
+ for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey === req.params.projectKey) {
+ if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(sessionId);
+ if (userId) {
+ const connected_sockets = await io.in(peerId).fetchSockets();
+ for (let item of connected_sockets) {
+ if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
+ } else {
+ liveSessions[projectKey].push(sessionId);
+ }
}
}
- let result = {"data": liveSessions[req.params.projectKey] || []};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
+ respond(res, liveSessions);
+}
+wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
+
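+// GET sockets-list/:projectKey: the same listing restricted to one project.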
+const socketsListByProject = async function (req, res) {
+ debug && console.log("[WS]looking for available sessions");
+ let _projectKey = extractProjectKeyFromRequest(req);
+ let userId = extractUserIdFromRequest(req);
+ let liveSessions = {};
+ let rooms = await getAvailableRooms();
+ for (let peerId of rooms) {
+ let {projectKey, sessionId} = extractPeerId(peerId);
+ if (projectKey === _projectKey) {
+ liveSessions[projectKey] = liveSessions[projectKey] || [];
+ if (userId) {
+ const connected_sockets = await io.in(peerId).fetchSockets();
+ for (let item of connected_sockets) {
+ if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
+ } else {
+ liveSessions[projectKey].push(sessionId);
+ }
+ }
}
+ respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
+ let userId = extractUserIdFromRequest(req);
let liveSessions = {};
- for (let peerId of io.sockets.adapter.rooms.keys()) {
+ let rooms = await getAvailableRooms();
+ for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ if (userId) {
+ if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
+ } else {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
}
}
}
}
- let result = {"data": liveSessions};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
- }
+ respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
- if (process.env.uws === "true") {
- req.params = {projectKey: req.getParameter(0)};
- }
- debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
+ debug && console.log("[WS]looking for available LIVE sessions");
+ let _projectKey = extractProjectKeyFromRequest(req);
+ let userId = extractUserIdFromRequest(req);
let liveSessions = {};
- for (let peerId of io.sockets.adapter.rooms.keys()) {
+ let rooms = await getAvailableRooms();
+ for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
- if (projectKey === req.params.projectKey) {
+ if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
- liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ if (userId) {
+ if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
+ } else {
+ liveSessions[projectKey].push(item.handshake.query.sessionInfo);
+ }
}
}
}
}
- let result = {"data": liveSessions[req.params.projectKey] || []};
- if (process.env.uws !== "true") {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'application/json');
- res.end(JSON.stringify(result));
- } else {
- res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
- }
+ respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@@ -192,29 +265,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
- start: (server) => {
- if (process.env.uws !== "true") {
- io = _io(server, {
- maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
- cors: {
- origin: "*",
- methods: ["GET", "POST", "PUT"]
- },
- path: '/socket'
- });
- } else {
- io = new _io.Server({
- maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
- cors: {
- origin: "*",
- methods: ["GET", "POST", "PUT"]
- },
- path: '/socket',
- // transports: ['websocket'],
- // upgrade: false
- });
- io.attachApp(server);
- }
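+    // The optional prefix is forwarded to createSocketIOServer so the socket path can be mounted under a sub-path.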
+ start: (server, prefix) => {
+ createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
@@ -285,10 +337,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
- debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
+ debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
- debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
+ debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@@ -302,7 +354,7 @@ module.exports = {
});
console.log("WS server started")
- setInterval((io) => {
+ setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);
diff --git a/frontend/.gitignore b/frontend/.gitignore
index 92150a232..dfcb0fd79 100644
--- a/frontend/.gitignore
+++ b/frontend/.gitignore
@@ -8,3 +8,4 @@ app/components/ui/SVG.js
*.DS_Store
.env
*css.d.ts
+*.cache
diff --git a/frontend/app/components/BugFinder/BugFinder.js b/frontend/app/components/BugFinder/BugFinder.js
index 6d30359f1..326a1e78e 100644
--- a/frontend/app/components/BugFinder/BugFinder.js
+++ b/frontend/app/components/BugFinder/BugFinder.js
@@ -13,7 +13,8 @@ import withLocationHandlers from "HOCs/withLocationHandlers";
import { fetch as fetchFilterVariables } from 'Duck/sources';
import { fetchSources } from 'Duck/customField';
import { RehydrateSlidePanel } from './WatchDogs/components';
-import { setActiveTab, setFunnelPage } from 'Duck/sessions';
+import { setFunnelPage } from 'Duck/sessions';
+import { setActiveTab } from 'Duck/search';
import SessionsMenu from './SessionsMenu/SessionsMenu';
import { LAST_7_DAYS } from 'Types/app/period';
import { resetFunnel } from 'Duck/funnels';
@@ -51,12 +52,12 @@ const allowedQueryKeys = [
variables: state.getIn([ 'customFields', 'list' ]),
sources: state.getIn([ 'customFields', 'sources' ]),
filterValues: state.get('filterValues'),
- activeTab: state.getIn([ 'sessions', 'activeTab' ]),
favoriteList: state.getIn([ 'sessions', 'favoriteList' ]),
currentProjectId: state.getIn([ 'user', 'siteId' ]),
sites: state.getIn([ 'site', 'list' ]),
watchdogs: state.getIn(['watchdogs', 'list']),
activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
+ sessions: state.getIn([ 'sessions', 'list' ]),
}), {
fetchFavoriteSessionList,
applyFilter,
@@ -91,7 +92,9 @@ export default class BugFinder extends React.PureComponent {
// keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
// };
// });
- props.fetchSessions();
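+      // Fetch sessions only when the store is empty; an existing list is reused.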
+ if (props.sessions.size === 0) {
+ props.fetchSessions();
+ }
props.resetFunnel();
props.resetFunnelFilters();
props.fetchFunnelsList(LAST_7_DAYS)
@@ -115,7 +118,6 @@ export default class BugFinder extends React.PureComponent {
}
render() {
- const { activeFlow, activeTab } = this.props;
const { showRehydratePanel } = this.state;
return (
diff --git a/frontend/app/components/BugFinder/SessionList/SessionList.js b/frontend/app/components/BugFinder/SessionList/SessionList.js
index 10db59c5b..f5152222a 100644
--- a/frontend/app/components/BugFinder/SessionList/SessionList.js
+++ b/frontend/app/components/BugFinder/SessionList/SessionList.js
@@ -1,7 +1,7 @@
import { connect } from 'react-redux';
-import { Loader, NoContent, Button, LoadMoreButton } from 'UI';
+import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI';
import { applyFilter, addAttribute, addEvent } from 'Duck/filters';
-import { fetchSessions, addFilterByKeyAndValue } from 'Duck/search';
+import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search';
import SessionItem from 'Shared/SessionItem';
import SessionListHeader from './SessionListHeader';
import { FilterKey } from 'Types/filter/filterType';
@@ -15,17 +15,20 @@ var timeoutId;
shouldAutorefresh: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 0,
savedFilters: state.getIn([ 'filters', 'list' ]),
loading: state.getIn([ 'sessions', 'loading' ]),
- activeTab: state.getIn([ 'sessions', 'activeTab' ]),
+ activeTab: state.getIn([ 'search', 'activeTab' ]),
allList: state.getIn([ 'sessions', 'list' ]),
total: state.getIn([ 'sessions', 'total' ]),
filters: state.getIn([ 'search', 'instance', 'filters' ]),
metaList: state.getIn(['customFields', 'list']).map(i => i.key),
+ currentPage: state.getIn([ 'search', 'currentPage' ]),
+ lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]),
}), {
applyFilter,
addAttribute,
addEvent,
fetchSessions,
addFilterByKeyAndValue,
+ updateCurrentPage,
})
export default class SessionList extends React.PureComponent {
state = {
@@ -76,6 +79,8 @@ export default class SessionList extends React.PureComponent {
clearTimeout(timeoutId)
}
+
+
renderActiveTabContent(list) {
const {
loading,
@@ -84,6 +89,9 @@ export default class SessionList extends React.PureComponent {
allList,
activeTab,
metaList,
+ currentPage,
+ total,
+ lastPlayedSessionId,
} = this.props;
const _filterKeys = filters.map(i => i.key);
const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID);
@@ -93,49 +101,47 @@ export default class SessionList extends React.PureComponent {
return (
-
Please try changing your search parameters.
- {allList.size > 0 && (
-
- However, we found other sessions based on your search parameters.
-
-
+
+
Please try changing your search parameters.
+ {allList.size > 0 && (
+
+ However, we found other sessions based on your search parameters.
+
+
+
-
- )}
-
+ )}
+
}
>
- { list.take(displayedCount).map(session => (
+ { list.map(session => (
))}
-
- Haven't found the session in the above list? Try being a bit more specific by setting a specific time frame or simply use different filters
-
- }
- />
+