Merge branch 'dev' into assist-chat

ShiKhu 2022-03-18 20:18:44 +01:00
commit f25d63c4d9
65 changed files with 3020 additions and 1138 deletions


@@ -47,7 +47,7 @@ jobs:
#
# Getting the images to build
#
git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt
git diff --name-only HEAD HEAD~1 | grep backend/services | cut -d '/' -f3 | uniq > backend/images_to_build.txt
[[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0)
#
# Pushing image to registry


@@ -102,11 +102,9 @@ def Build(a):
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
full_args, query_part = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:


@@ -64,9 +64,12 @@ def get_live_sessions(project_id, filters=None):
return helper.list_to_camel_case(results)
def get_live_sessions_ws(project_id):
def get_live_sessions_ws(project_id, user_id=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
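
The optional user filter is forwarded to the peer server as a plain query parameter. A minimal sketch of the same call, assuming config("peers") % config("S3_KEY") resolves to an HTTP base URL and the endpoint returns JSON (names here are illustrative, not the project's API):

import requests

def live_sessions(base_url: str, project_key: str, user_id: str = None):
    # send the filter only when a non-empty user_id was given,
    # mirroring the `if user_id and len(user_id) > 0` guard above
    params = {"userId": user_id} if user_id else {}
    r = requests.get(f"{base_url}/{project_key}", params=params)
    if r.status_code != 200:
        return []
    return r.json()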


@@ -1,7 +1,8 @@
import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@@ -398,79 +399,104 @@ def get_details_chart(project_id, error_id, user_id, **data):
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
ch_sub_query = [f"{project_key} =%(project_id)s"]
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == 'mobile':
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == 'desktop':
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
@dev.timed
def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id")
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True)
pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-30)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now(1)
if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
if data.startDate is None:
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_status=data.status)
if len(statuses) == 0:
return {"data": {
'total': 0,
'errors': []
}}
error_ids = [e["error_id"] for e in statuses]
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
density = data.get("density", 7)
step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.get("sort") is not None:
sort = __get_sort_key(data["sort"])
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.get("order") is not None:
order = data["order"]
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
main_pg_query = f"""\
SELECT error_id,
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
@@ -478,19 +504,23 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
last_occurrence,
first_occurrence,
chart
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
@@ -500,7 +530,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
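
The paging rewrite above leans on a window function: COUNT(details) OVER () is evaluated before LIMIT/OFFSET, so every returned row carries the total match count and no second COUNT(*) query is needed. A standalone sketch of the pattern, assuming a dict-style cursor like the one pg_client yields (table and parameter names are hypothetical):

def fetch_error_page(cur, project_id, limit=200, page=1):
    cur.execute(cur.mogrify(
        """SELECT COUNT(t) OVER () AS full_count, t.*
           FROM (SELECT error_id, name
                 FROM public.errors
                 WHERE project_id = %(project_id)s
                 ORDER BY error_id) AS t
           LIMIT %(limit)s OFFSET %(offset)s;""",
        {"project_id": project_id, "limit": limit, "offset": (page - 1) * limit}))
    rows = cur.fetchall()
    # full_count repeats on every row; an empty page reads as total = 0,
    # which is exactly how the code above treats it
    total = 0 if len(rows) == 0 else rows[0]["full_count"]
    return total, rows
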
@@ -508,16 +538,14 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
total = cur.rowcount
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
row = cur.fetchone()
rows = []
limit = 200
while row is not None and len(rows) < limit:
rows.append(row)
row = cur.fetchone()
if total == 0:
rows = []
else:
@@ -537,15 +565,16 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@@ -581,7 +610,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
error = get(error_id=error_id)
error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":


@@ -97,7 +97,55 @@ def __get_data_for_extend(data):
return data["data"]
def __pg_errors_query(source=None):
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
@@ -120,30 +168,6 @@ def __pg_errors_query(source=None):
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
@@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
cur.mogrify(__pg_errors_query(source,
value_length=len(value) \
if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None):
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);""",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
sub_from.append(
f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)")
if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
def __generic_query(typename):
return f"""\
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5)"""
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 10;"""
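
The value_length switch above trades recall for cost: inputs of three characters or more run a prefix-anchored query UNIONed with a substring query, while shorter inputs run only the cheaper prefix form. A self-contained sketch of the two ILIKE pattern shapes (the escaping helper here is hypothetical; the real string_to_sql_like may behave differently):

def to_ilike(value: str, prefix: bool = False) -> str:
    # escape ILIKE wildcards, then anchor to the start or wrap for substring match
    escaped = value.replace("%", r"\%").replace("_", r"\_")
    return escaped + "%" if prefix else "%" + escaped + "%"

assert to_ilike("lo", prefix=True) == "lo%"   # short input: prefix form only
assert to_ilike("login") == "%login%"         # longer input also gets the substring form
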
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(__generic_query(event.ui_type),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
cur.execute(
cur.mogrify(
__generic_query(event.ui_type,
value_length=len(value) \
if SUPPORTED_TYPES[event.ui_type].change_by_length \
else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
return helper.list_to_camel_case(cur.fetchall())
return f
@@ -263,142 +360,96 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
query=__generic_query(typename=event_type.CLICK.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
query=__generic_query(typename=event_type.INPUT.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
query=__generic_query(typename=event_type.LOCATION.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
query=__generic_query(typename=event_type.CUSTOM.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
query=__generic_query(typename=event_type.REQUEST.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
query=__generic_query(typename=event_type.GRAPHQL.ui_type),
value_limit=3,
starts_with="/",
starts_limit=4,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
query=__generic_query(typename=event_type.STATEACTION.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None,
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None, change_by_length=True),
}
def __get_merged_queries(queries, value, project_id):
if len(queries) == 0:
return []
now = TimeUTC.now()
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}")
return results
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
FROM (SELECT project_id, type, value
FROM (SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(svalue)s
UNION
SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(value)s) AS u
WHERE Row_ID <= 5) AS sfa
ORDER BY sfa.type;""",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
query = cur.mogrify("UNION ALL".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
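
A note on the rewritten __get_autocomplete_table: rather than ranking one big scan with ROW_NUMBER(), it now unions small per-type sub-selects, each with its own LIMIT, so a single noisy type cannot crowd the others out of the shared result. The assembly reduces to string-joining parenthesized sub-queries, roughly:

# illustrative only, with literal type names instead of the schemas.* enums
subs = [f"""(SELECT type, value
             FROM public.autocomplete
             WHERE project_id = %(project_id)s
               AND type = '{t}'
               AND value ILIKE %(svalue)s
             LIMIT 5)""" for t in ("CLICK", "INPUT")]
query = " UNION ALL ".join(subs) + ";"
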
def search_pg2(text, event_type, project_id, source, key):
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
if event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)


@@ -1,4 +1,5 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
@@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
# def filter_stages(stages):
# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
schemas.EventType.location, schemas.EventType.custom,
schemas.EventType.click_ios, schemas.EventType.input_ios,
schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
events = []
for e in f_events:
if e.operator is None:
e.operator = schemas.SearchEventOperator._is
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
return events
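
A usage sketch for the helpers above (the payload and the "CLICK"/"INPUT" literals are hypothetical): __fix_stages defaults a missing operator to _is, wraps scalar values in a list, and drops stages whose value list ends up empty.

stages = __parse_events([{"type": "CLICK", "value": "Sign up"},
                         {"type": "INPUT", "value": []}])
stages = __fix_stages(stages)
# -> one stage remains (operator=_is, value=["Sign up"]); the empty INPUT stage is dropped
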
def __transform_old_funnels(events):
for e in events:
@@ -28,7 +55,7 @@ def __transform_old_funnels(events):
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
# filter.events = filter_stages(stages=filter.events)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
@@ -76,9 +103,12 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
@@ -102,9 +132,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
# row["filter"]["events"] = filter_stages(row["filter"]["events"])
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
@@ -168,7 +198,8 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
# data.events = filter_stages(data.events)
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
@@ -192,17 +223,18 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data):
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
if len(insights) > 0:
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
@@ -220,25 +252,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
@dev.timed
def get_issues_on_the_fly(funnel_id, user_id, project_id, data):
first_stage = data.get("firstStage")
last_stage = data.get("lastStage")
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
last_stage=last_stage))}
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True):
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@@ -260,7 +293,11 @@ def get(funnel_id, project_id, user_id, flatten=True):
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
# f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
@@ -279,7 +316,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None


@@ -168,10 +168,11 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False,
error_status="ALL", count_only=False, issue=None):
full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id,
user_id)
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
@@ -198,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = "DESC"
else:
data.order = data.order.upper()
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
else:
sort = 'start_ts'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(users_sessions)
@@ -206,51 +218,51 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
count(full_sessions) AS user_sessions_count,
jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
MIN(full_sessions.start_ts) AS first_session_ts,
ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
GROUP BY user_id
ORDER BY user_sessions_count DESC) AS users_sessions;""",
ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
) AS filtred_sessions
) AS full_sessions
GROUP BY user_id
) AS users_sessions;""",
full_args)
else:
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""",
full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
cur.execute(main_query)
if count_only:
return helper.dict_to_camel_case(cur.fetchone())
cur.execute(main_query)
if errors_only:
return helper.list_to_camel_case(cur.fetchall())
sessions = cur.fetchone()
if count_only:
return helper.dict_to_camel_case(sessions)
total = sessions["count"]
sessions = sessions["sessions"]
# sessions = []
# total = cur.rowcount
# row = cur.fetchone()
# limit = 200
# while row is not None and len(sessions) < limit:
# if row.get("favorite"):
# limit += 1
# sessions.append(row)
# row = cur.fetchone()
if errors_only:
return sessions
if data.group_by_user:
for i, s in enumerate(sessions):
sessions[i] = {**s.pop("last_session")[0], **s}
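
The g_sort expression earlier in this hunk orders user groups by an aggregate of the per-session sort column: MIN for a DESC listing, MAX for ASC. The same idea in plain Python, on made-up data:

sessions_by_user = {"u1": [{"start_ts": 10}, {"start_ts": 30}],
                    "u2": [{"start_ts": 20}]}
order = "DESC"
agg = min if order == "DESC" else max   # mirrors f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
ranked = sorted(sessions_by_user.items(),
                key=lambda kv: agg(s["start_ts"] for s in kv[1]),
                reverse=(order == "DESC"))
# DESC by per-group MIN -> "u2" (min 20) ranks before "u1" (min 10)
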
@@ -281,9 +293,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
@@ -378,8 +390,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
fav_only_join = ""
if favorite_only and not errors_only:
fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
extra_constraints.append("fs.user_id = %(userId)s")
full_args["userId"] = user_id
# extra_constraints.append("fs.user_id = %(userId)s")
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@@ -958,24 +969,24 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# elif data.platform == schemas.PlatformType.desktop:
# extra_constraints.append(
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
if error_status != "ALL":
extra_constraints.append("ser.project_id = %(project_id)s")
if error_status != schemas.ErrorStatus.all:
extra_constraints.append("ser.status = %(error_status)s")
full_args["status"] = error_status.lower()
full_args["error_status"] = error_status
if favorite_only:
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(user_id)s")
extra_constraints.append("ufe.user_id = %(userId)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
if not favorite_only and not errors_only and user_id is not None:
if favorite_only and not errors_only and user_id is not None:
extra_from += """INNER JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)"""
elif not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
@@ -1003,7 +1014,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
{extra_from}
WHERE
{" AND ".join(extra_constraints)}"""
return full_args, query_part, sort
return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
@@ -1102,48 +1113,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date):
return helper.list_to_camel_case(rows)
def get_favorite_sessions(project_id, user_id, include_viewed=False):
with pg_client.PostgresClient() as cur:
query_part = cur.mogrify(f"""\
FROM public.sessions AS s
LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
WHERE fs.user_id = %(userId)s""",
{"projectId": project_id, "userId": user_id}
)
extra_query = b""
if include_viewed:
extra_query = cur.mogrify(""",\
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s), FALSE) AS viewed""",
{"projectId": project_id, "userId": user_id})
cur.execute(f"""\
SELECT s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_os,
s.user_browser,
s.user_device,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
TRUE AS favorite
{extra_query.decode('UTF-8')}
{query_part.decode('UTF-8')}
ORDER BY s.session_id
LIMIT 50;""")
sessions = cur.fetchall()
return helper.list_to_camel_case(sessions)
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@@ -1196,11 +1165,11 @@ def get_session_user(project_id, user_id):
"public".sessions
WHERE
project_id = %(project_id)s
AND user_id = %(user_id)s
AND user_id = %(userId)s
AND duration is not null
GROUP BY user_id;
""",
{"project_id": project_id, "user_id": user_id}
{"project_id": project_id, "userId": user_id}
)
cur.execute(query=query)
data = cur.fetchone()
@@ -1213,8 +1182,8 @@ def get_session_ids_by_user_ids(project_id, user_ids):
"""\
SELECT session_id FROM public.sessions
WHERE
project_id = %(project_id)s AND user_id IN %(user_id)s;""",
{"project_id": project_id, "user_id": tuple(user_ids)}
project_id = %(project_id)s AND user_id IN %(userId)s;""",
{"project_id": project_id, "userId": tuple(user_ids)}
)
ids = cur.execute(query=query)
return ids
@@ -1240,8 +1209,8 @@ def delete_sessions_by_user_ids(project_id, user_ids):
"""\
DELETE FROM public.sessions
WHERE
project_id = %(project_id)s AND user_id IN %(user_id)s;""",
{"project_id": project_id, "user_id": tuple(user_ids)}
project_id = %(project_id)s AND user_id IN %(userId)s;""",
{"project_id": project_id, "userId": tuple(user_ids)}
)
cur.execute(query=query)


@@ -80,32 +80,41 @@ def get_top_key_values(project_id):
return helper.dict_to_CAPITAL_keys(row)
def __generic_query(typename):
return f"""\
SELECT value, type
FROM ((SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f""" (SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION
(SELECT value, type
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)) AS met"""
LIMIT 5);"""
return f""" SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(__generic_query(typename),
query = cur.mogrify(__generic_query(typename,
value_length=len(text) \
if SUPPORTED_TYPES[typename].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
@@ -120,124 +129,73 @@ SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
value_limit=5,
starts_with="/",
starts_limit=5,
ignore_if_starts_with=[]),
change_by_length=True),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
}
@@ -247,6 +205,7 @@ def search(text, meta_type, project_id):
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
# for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}


@@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
for i, s in enumerate(stages):
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
if s.get("operator") is None:
s["operator"] = "is"
@@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
is_any = sessions._isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
i += 1
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
@@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
) AS issues_t
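
The loop change in this file is an off-by-one guard: enumerate() advanced i even for stages skipped by continue, so stage{len(stages)}_timestamp could reference a stage column that was never emitted. Incrementing i only for accepted stages keeps i + 1 equal to the number of emitted stages. A stripped-down illustration:

stages = ["click", None, "input"]   # None stands for a stage filtered out by the checks
i = -1
for s in stages:
    if s is None:
        continue                    # skipped stages must not consume an index
    i += 1
# i == 1, so the SQL can safely reference stage2 as the last stage, not stage3
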


@@ -6,10 +6,7 @@ class Event:
class SupportedFilter:
def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with):
def __init__(self, get, query, change_by_length):
self.get = get
self.query = query
self.valueLimit = value_limit
self.startsWith = starts_with
self.startsLimit = starts_limit
self.ignoreIfStartsWith = ignore_if_starts_with
self.change_by_length = change_by_length


@@ -21,13 +21,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/sessions2/favorite', tags=["sessions"])
def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True)
}
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
@@ -126,14 +119,14 @@ def events_search(projectId: int, q: str,
else:
return {"data": []}
result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key)
result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key)
return result
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search2_pg(data, projectId, user_id=context.user_id)
data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext =
return {'data': sessions_metas.get_top_key_values(projectId)}
@app.get('/{projectId}/sessions/filters/search', tags=["sessions"])
def get_session_filters_meta(projectId: int, q: str, type: str,
context: schemas.CurrentContext = Depends(OR_context)):
meta_type = type
if len(meta_type) == 0:
return {"data": []}
if len(q) == 0:
return {"data": []}
return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
@@ -716,7 +698,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())
data=data)
@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@@ -731,7 +713,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())}
data=data)}
@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@@ -755,10 +737,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None:
return {"errors": ["issue not found"]}
return {
"data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
start_date=startDate,
end_date=endDate),
start_date=startDate, end_date=endDate),
"issue": issue}}
@@ -837,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, user_id=userId)
return {'data': data}
@@ -901,13 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False,
data: schemas.SearchErrorsSchema = Body(...),
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(favorite, str):
favorite = True if len(favorite) == 0 else False
return errors.search(data.dict(), projectId, user_id=context.user_id, status=status,
favorite_only=favorite)
return errors.search(data, projectId, user_id=context.user_id)
@app.get('/{projectId}/errors/stats', tags=['errors'])

View file

@@ -83,15 +83,6 @@ class EditSlackSchema(BaseModel):
url: HttpUrl = Field(...)
class SearchErrorsSchema(BaseModel):
platform: Optional[str] = Field(None)
startDate: Optional[int] = Field(TimeUTC.now(-7))
endDate: Optional[int] = Field(TimeUTC.now())
density: Optional[int] = Field(7)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CreateNotificationSchema(BaseModel):
token: str = Field(...)
notifications: List = Field(...)
@@ -609,11 +600,12 @@ class SessionsSearchPayloadSchema(BaseModel):
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
order: str = Field(default="DESC")
order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
bookmarked: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
@@ -662,6 +654,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
@root_validator(pre=True)
def enforce_default_values(cls, values):
@@ -694,6 +687,27 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
class ErrorStatus(str, Enum):
all = 'all'
unresolved = 'unresolved'
resolved = 'resolved'
ignored = 'ignored'
class ErrorSort(str, Enum):
occurrence = 'occurrence'
users_count = 'users'
sessions_count = 'sessions'
class SearchErrorsSchema(SessionsSearchPayloadSchema):
sort: ErrorSort = Field(default=ErrorSort.occurrence)
density: Optional[int] = Field(7)
status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)
query: Optional[str] = Field(default=None)
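
For orientation, a request body this schema accepts could look like the following sketch (field names per the definitions above; values are illustrative):

    # hypothetical POST /{projectId}/errors/search body, as a Python dict
    payload = {
        "startDate": 1647561600000,  # ms epoch; defaults to now-7d when omitted
        "endDate": 1647648000000,
        "sort": "occurrence",        # ErrorSort: occurrence | users | sessions
        "order": "desc",
        "status": "unresolved",      # ErrorStatus: all | unresolved | resolved | ignored
        "query": "TypeError",        # ILIKE match against error name/message
        "limit": 200, "page": 1, "bookmarked": False,
        "events": [], "filters": [],
    }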
class MetricPayloadSchema(BaseModel):

View file

@@ -9,16 +9,17 @@ import (
func getSessionKey(sessionID uint64) string {
// Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID
return strconv.FormatUint(sessionID>>50, 10)
return strconv.FormatUint(sessionID>>50, 10)
}
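
Put differently, the key is the session ID with its low 50 bits dropped; because flake-style IDs keep the start timestamp in their upper bits, the key rolls over roughly once per week. A minimal Python sketch of the same computation:

    def get_session_key(session_id: int) -> str:
        # keep only the timestamp-derived upper bits (see pkg/flakeid)
        return str(session_id >> 50)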
func ResolveURL(baseurl string, rawurl string) string {
rawurl = strings.Trim(rawurl, " ")
if !isRelativeCachable(rawurl) {
return rawurl
}
base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
return rawurl
}
return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl)
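
For comparison, Python's urljoin performs the same base-against-relative resolution as ResolveReference (illustrative only; base_url and raw_url are placeholders):

    from urllib.parse import urljoin

    # urljoin("https://app.example.com/assets/", "../img/logo.svg")
    #   -> "https://app.example.com/img/logo.svg"
    resolved = urljoin(base_url, raw_url)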
@@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string {
}
func GetCachePathForAssets(sessionID uint64, rawurl string) string {
return getCachePathWithKey(sessionID, rawurl)
return getCachePathWithKey(sessionID, rawurl)
}
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string {
fullURL, cachable := GetFullCachableURL(baseURL, relativeURL)
if !cachable {
return fullURL
}
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
}
return u.String()
}

View file

@@ -1,8 +1,9 @@
import json
import schemas
from chalicelib.core import dashboard
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import ch_client
from chalicelib.utils import ch_client, metrics_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data):
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE pe.error_id = fe.error_id
AND fe.user_id = %(user_id)s), FALSE) AS favorite,
AND fe.user_id = %(userId)s), FALSE) AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
@@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data):
AND error_id = %(error_id)s
ORDER BY start_ts DESC
LIMIT 1;""",
{"project_id": project_id, "error_id": error_id, "user_id": user_id})
{"project_id": project_id, "error_id": error_id, "userId": user_id})
cur.execute(query=query)
status = cur.fetchone()
@@ -423,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if time_constraint:
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
if platform == 'mobile':
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == 'desktop':
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@@ -437,60 +438,280 @@ def __get_step_size(startTimestamp, endTimestamp, density):
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
ch_sub_query = __get_basic_constraints(data.get('platform'))
ch_sub_query.append("source ='js_exception'")
def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
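
The helper returns a list of SQL predicates that callers join with AND, exactly as search() does below:

    constraints = __get_basic_constraints_pg(platform=None, project_key="sessions.project_id")
    where_clause = " AND ".join(constraints)
    # -> "sessions.project_id =%(project_id)s AND timestamp >= %(startDate)s AND timestamp < %(endDate)s"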
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-30)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now(1)
if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
if data.startDate is None:
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# search the matching sessions first; their error_ids constrain the error aggregation below
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_ids = [e["error_id"] for e in statuses]
error_status=data.status)
if len(statuses) == 0:
return {"data": {
'total': 0,
'errors': []
}}
with ch_client.ClickHouseClient() as ch:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
density = data.get("density", 7)
step_size = __get_step_size(data["startDate"], data["endDate"], density)
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.get("sort") is not None:
sort = __get_sort_key(data["sort"])
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.get("order") is not None:
order = data["order"]
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
sessions,
last_occurrence,
first_occurrence,
chart
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
if total == 0:
rows = []
else:
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
else:
r["status"] = "untracked"
r["parent_error_id"] = None
r["favorite"] = False
r["viewed"] = False
r["stack"] = None
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
or len(r["stack"]) > 0
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
"data": {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
}
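
Two details of the query above are worth spelling out: pagination is plain offset/limit, and the chart is built per error by joining each generate_series bucket against events.errors. The offset arithmetic, worked through:

    # with the default limit of 200, page 3 skips the first two pages
    page, limit = 3, 200
    errors_offset = (page - 1) * limit  # 400: rows 401-600 come back
    errors_limit = limit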
# TODO: refactor this function once the ClickHouse schema changes land (search by query is not supported here)
def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
ch_sub_query = __get_basic_constraints(platform)
ch_sub_query.append("source ='js_exception'")
statuses = []
error_ids = None
# Clickhouse keeps data for the past month only, so no need to search beyond that
if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
print("-- searching for sessions before errors")
# search the matching sessions first; their error_ids constrain the error aggregation below
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = __get_step_size(data.startDate, data.endDate, data.density)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.order is not None:
order = data.order
params = {
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if data.bookmarked:
cur.execute(cur.mogrify(f"""SELECT error_id
FROM public.user_favorite_errors
WHERE user_id = %(userId)s
{"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
{"userId": user_id, "error_ids": tuple(error_ids or [])}))
error_ids = cur.fetchall()
if len(error_ids) == 0:
return empty_response
error_ids = [e["error_id"] for e in error_ids]
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
ch_sub_query.append("error_id IN %(error_ids)s")
main_ch_query = f"""\
SELECT COUNT(DISTINCT error_id) AS count
FROM errors
WHERE {" AND ".join(ch_sub_query)};"""
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
total = ch.execute(query=main_ch_query, params=params)[0]["count"]
if flows:
return {"data": {"count": total}}
@@ -510,9 +731,10 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
@@ -523,35 +745,36 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
# print("--------------------")
# print(main_ch_query % params)
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
rows = ch.execute(query=main_ch_query, params=params)
if len(statuses) == 0:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"userId": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@@ -565,9 +788,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
r["chart"] = list(r["chart"])
for i in range(len(r["chart"])):
r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"],
end_time=data["endDate"],
density=density, neutral={"count": 0})
r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
end_time=data.endDate,
density=data.density, neutral={"count": 0})
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
@@ -593,7 +816,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
error = get(error_id=error_id)
error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":
@@ -766,7 +989,7 @@ def format_first_stack_frame(error):
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
"""WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s)
SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
FROM (SELECT root_error.error_id
FROM events.errors
@@ -780,7 +1003,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim
AND user_viewed.error_id ISNULL
LIMIT 1
) AS timed_errors;""",
{"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
{"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp})
cur.execute(query=query)
row = cur.fetchone()

View file

@@ -0,0 +1,25 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
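
These partial trigram indexes let per-type autocomplete lookups scan a much smaller index than the global gin_trgm_ops one. A query of the shape they target, sketched in the style of the chalicelib helpers (values illustrative):

    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(
            """SELECT value
               FROM public.autocomplete
               WHERE type = 'CLICK'     -- matches the partial-index predicate
                 AND value ILIKE %(q)s  -- served by the trigram index
               LIMIT 10;""",
            {"q": "%checkout%"}))
        rows = cur.fetchall()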

View file

@@ -723,6 +723,22 @@ $$
CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
@@ -1018,7 +1034,7 @@ $$
CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE TABLE IF NOT EXISTS events.state_actions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,

View file

@@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers
var express = require('express');
const {ExpressPeerServer} = require('peer');
var socket;
if (process.env.cluster === "true") {
if (process.env.redis === "true") {
console.log("Using Redis");
socket = require("./servers/websocket-cluster");
} else {

View file

@@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader;
const {extractPeerId} = require('./peerjs-server');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
var wsRouter = express.Router();
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
let io;
const debug = process.env.debug === "1" || false;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
}
const uniqueSessions = function (data) {
let resArr = [];
@@ -36,18 +58,40 @@ const uniqueSessions = function (data) {
return resArr;
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
let result = {"data": liveSessions};
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
let rooms = await io.of('/').adapter.allRooms();
return rooms;
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@@ -56,37 +100,64 @@ const socketsList = async function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
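
This is the endpoint behind assist.get_live_sessions_ws(projectId, user_id=userId) shown earlier; a direct caller sketch (peers_url and the userId value are placeholders):

    import requests

    resp = requests.get(f"{peers_url}/{project_key}", params={"userId": "jane@acme.com"})
    live_session_ids = resp.json().get("data", [])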
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
@@ -94,51 +165,48 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
}
}
let result = {"data": liveSessions};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
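
The userID being matched here travels inside the sessionInfo that a tracker sends in its handshake query (identity=session) and that extractSessionInfo parses server-side. A hedged python-socketio sketch of such a client, assuming sessionInfo is JSON on the wire:

    import json
    import socketio  # python-socketio; illustrative client, not part of the repo

    sio = socketio.Client()
    session_info = json.dumps({"userID": "jane@acme.com"})  # assumed wire format
    sio.connect(f"https://assist.example.com?peerId={project_key}-{session_id}"
                f"&identity=session&sessionInfo={session_info}",
                socketio_path="/socket")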
@@ -219,35 +287,13 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
socket.identity = socket.handshake.query.identity;
let {projectKey, sessionId} = extractPeerId(socket.peerId);
const {projectKey, sessionId} = extractPeerId(socket.peerId);
socket.sessionId = sessionId;
socket.projectKey = projectKey;
socket.lastMessageReceivedAt = Date.now();

View file

@@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
var wsRouter = express.Router();
const {extractPeerId} = require('./peerjs-server');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
let debug = process.env.debug === "1" || false;
const debug = process.env.debug === "1" || false;
const socketsList = function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
}
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
let result = {"data": liveSessions};
}
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@@ -36,84 +82,111 @@ const socketsList = function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
let result = {"data": liveSessions};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@@ -192,29 +265,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
@@ -285,10 +337,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@@ -302,7 +354,7 @@ module.exports = {
});
console.log("WS server started")
setInterval((io) => {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);

View file

@@ -13,7 +13,8 @@ import withLocationHandlers from "HOCs/withLocationHandlers";
import { fetch as fetchFilterVariables } from 'Duck/sources';
import { fetchSources } from 'Duck/customField';
import { RehydrateSlidePanel } from './WatchDogs/components';
import { setActiveTab, setFunnelPage } from 'Duck/sessions';
import { setFunnelPage } from 'Duck/sessions';
import { setActiveTab } from 'Duck/search';
import SessionsMenu from './SessionsMenu/SessionsMenu';
import { LAST_7_DAYS } from 'Types/app/period';
import { resetFunnel } from 'Duck/funnels';
@@ -51,12 +52,12 @@ const allowedQueryKeys = [
variables: state.getIn([ 'customFields', 'list' ]),
sources: state.getIn([ 'customFields', 'sources' ]),
filterValues: state.get('filterValues'),
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
favoriteList: state.getIn([ 'sessions', 'favoriteList' ]),
currentProjectId: state.getIn([ 'user', 'siteId' ]),
sites: state.getIn([ 'site', 'list' ]),
watchdogs: state.getIn(['watchdogs', 'list']),
activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
sessions: state.getIn([ 'sessions', 'list' ]),
}), {
fetchFavoriteSessionList,
applyFilter,
@@ -91,7 +92,9 @@ export default class BugFinder extends React.PureComponent {
// keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
// };
// });
props.fetchSessions();
if (props.sessions.size === 0) {
props.fetchSessions();
}
props.resetFunnel();
props.resetFunnelFilters();
props.fetchFunnelsList(LAST_7_DAYS)
@@ -115,7 +118,6 @@ export default class BugFinder extends React.PureComponent {
}
render() {
const { activeFlow, activeTab } = this.props;
const { showRehydratePanel } = this.state;
return (

View file

@@ -1,7 +1,7 @@
import { connect } from 'react-redux';
import { Loader, NoContent, Button, LoadMoreButton } from 'UI';
import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI';
import { applyFilter, addAttribute, addEvent } from 'Duck/filters';
import { fetchSessions, addFilterByKeyAndValue } from 'Duck/search';
import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search';
import SessionItem from 'Shared/SessionItem';
import SessionListHeader from './SessionListHeader';
import { FilterKey } from 'Types/filter/filterType';
@@ -15,17 +15,20 @@ var timeoutId;
shouldAutorefresh: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 0,
savedFilters: state.getIn([ 'filters', 'list' ]),
loading: state.getIn([ 'sessions', 'loading' ]),
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
allList: state.getIn([ 'sessions', 'list' ]),
total: state.getIn([ 'sessions', 'total' ]),
filters: state.getIn([ 'search', 'instance', 'filters' ]),
metaList: state.getIn(['customFields', 'list']).map(i => i.key),
currentPage: state.getIn([ 'search', 'currentPage' ]),
lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]),
}), {
applyFilter,
addAttribute,
addEvent,
fetchSessions,
addFilterByKeyAndValue,
updateCurrentPage,
})
export default class SessionList extends React.PureComponent {
state = {
@@ -76,6 +79,8 @@ export default class SessionList extends React.PureComponent {
clearTimeout(timeoutId)
}
renderActiveTabContent(list) {
const {
loading,
@@ -84,6 +89,9 @@ export default class SessionList extends React.PureComponent {
allList,
activeTab,
metaList,
currentPage,
total,
lastPlayedSessionId,
} = this.props;
const _filterKeys = filters.map(i => i.key);
const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID);
@@ -93,49 +101,47 @@ export default class SessionList extends React.PureComponent {
return (
<NoContent
title={this.getNoContentMessage(activeTab)}
subtext="Please try changing your search parameters."
// subtext="Please try changing your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
subtext={
<div>
<div>Please try changing your search parameters.</div>
{allList.size > 0 && (
<div className="pt-2">
However, we found other sessions based on your search parameters.
<div>
<Button
plain
onClick={() => onMenuItemClick({ name: 'All', type: 'all' })}
>See All</Button>
<div>
<div>Please try changing your search parameters.</div>
{allList.size > 0 && (
<div className="pt-2">
However, we found other sessions based on your search parameters.
<div>
<Button
plain
onClick={() => onMenuItemClick({ name: 'All', type: 'all' })}
>See All</Button>
</div>
</div>
</div>
)}
</div>
)}
</div>
}
>
<Loader loading={ loading }>
{ list.take(displayedCount).map(session => (
{ list.map(session => (
<SessionItem
key={ session.sessionId }
session={ session }
hasUserFilter={hasUserFilter}
onUserClick={this.onUserClick}
metaList={metaList}
lastPlayedSessionId={lastPlayedSessionId}
/>
))}
</Loader>
<LoadMoreButton
className="mt-12 mb-12"
displayedCount={displayedCount}
totalCount={list.size}
loading={loading}
onClick={this.addPage}
description={ displayedCount === list.size &&
<div className="color-gray-medium text-sm text-center my-3">
Haven't found the session in the above list? <br/>Try being a bit more specific by setting a specific time frame or simply use different filters
</div>
}
/>
<div className="w-full flex items-center justify-center py-6">
<Pagination
page={currentPage}
totalPages={Math.ceil(total / PER_PAGE)}
onPageChange={(page) => this.props.updateCurrentPage(page)}
limit={PER_PAGE}
debounceRequest={1000}
/>
</div>
</NoContent>
);
}
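
The page count feeding the Pagination control is simple ceiling division; assuming PER_PAGE mirrors the API's default limit of 200, a worked example:

    import math
    total, PER_PAGE = 401, 200
    total_pages = math.ceil(total / PER_PAGE)  # -> 3 pages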

View file

@@ -64,5 +64,5 @@ function SessionListHeader({
};
export default connect(state => ({
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
}), { applyFilter })(SessionListHeader);

View file

@@ -1,30 +1,20 @@
import React, { useEffect } from 'react'
import React from 'react'
import { connect } from 'react-redux';
import cn from 'classnames';
import { SideMenuitem, SavedSearchList, Progress, Popup, Icon, CircularLoader } from 'UI'
import { SideMenuitem, SavedSearchList, Progress, Popup } from 'UI'
import stl from './sessionMenu.css';
import { fetchWatchdogStatus } from 'Duck/watchdogs';
import { setActiveFlow, clearEvents } from 'Duck/filters';
import { setActiveTab } from 'Duck/sessions';
import { clearEvents } from 'Duck/filters';
import { issues_types } from 'Types/session/issue'
import { fetchList as fetchSessionList } from 'Duck/sessions';
function SessionsMenu(props) {
const {
activeFlow, activeTab, watchdogs = [], keyMap, wdTypeCount,
fetchWatchdogStatus, toggleRehydratePanel, filters, sessionsLoading } = props;
const { activeTab, keyMap, wdTypeCount, toggleRehydratePanel } = props;
const onMenuItemClick = (filter) => {
props.onMenuItemClick(filter)
if (activeFlow && activeFlow.type === 'flows') {
props.setActiveFlow(null)
}
}
// useEffect(() => {
// fetchWatchdogStatus()
// }, [])
const capturingAll = props.captureRate && props.captureRate.get('captureAll');
@ -66,36 +56,13 @@ function SessionsMenu(props) {
{ issues_types.filter(item => item.visible).map(item => (
<SideMenuitem
key={item.key}
disabled={!keyMap[item.type] && !wdTypeCount[item.type]}
// disabled={!keyMap[item.type] && !wdTypeCount[item.type]}
active={activeTab.type === item.type}
title={item.name} iconName={item.icon}
onClick={() => onMenuItemClick(item)}
/>
))}
{/* <div className={stl.divider} />
<div className="my-3">
<SideMenuitem
title={
<div className="flex items-center">
<div>Assist</div>
{ activeTab.type === 'live' && (
<div
className="ml-4 h-5 w-6 flex items-center justify-center"
onClick={() => !sessionsLoading && props.fetchSessionList(filters.toJS())}
>
{ sessionsLoading ? <CircularLoader className="ml-1" /> : <Icon name="sync-alt" size="14" />}
</div>
)}
</div>
}
iconName="person"
active={activeTab.type === 'live'}
onClick={() => onMenuItemClick({ name: 'Assist', type: 'live' })}
/>
</div> */}
<div className={stl.divider} />
<div className="my-3">
<SideMenuitem
@ -113,13 +80,12 @@ function SessionsMenu(props) {
}
export default connect(state => ({
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
keyMap: state.getIn([ 'sessions', 'keyMap' ]),
wdTypeCount: state.getIn([ 'sessions', 'wdTypeCount' ]),
activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
captureRate: state.getIn(['watchdogs', 'captureRate']),
filters: state.getIn([ 'filters', 'appliedFilter' ]),
sessionsLoading: state.getIn([ 'sessions', 'fetchLiveListRequest', 'loading' ]),
}), {
fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab, fetchSessionList
fetchWatchdogStatus, clearEvents, fetchSessionList
})(SessionsMenu);

View file

@ -1,23 +1,19 @@
import { connect } from 'react-redux';
import withSiteIdRouter from 'HOCs/withSiteIdRouter';
import withPermissions from 'HOCs/withPermissions'
import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo";
import { getRE } from 'App/utils';
import { fetchBookmarks } from "Duck/errors";
import { UNRESOLVED, RESOLVED, IGNORED, BOOKMARK } from "Types/errorInfo";
import { fetchBookmarks, editOptions } from "Duck/errors";
import { applyFilter } from 'Duck/filters';
import { fetchList as fetchSlackList } from 'Duck/integrations/slack';
import { errors as errorsRoute, isRoute } from "App/routes";
import EventFilter from 'Components/BugFinder/EventFilter';
import DateRange from 'Components/BugFinder/DateRange';
import withPageTitle from 'HOCs/withPageTitle';
import { SavedSearchList } from 'UI';
import cn from 'classnames';
import List from './List/List';
import ErrorInfo from './Error/ErrorInfo';
import Header from './Header';
import SideMenuSection from './SideMenu/SideMenuSection';
import SideMenuHeader from './SideMenu/SideMenuHeader';
import SideMenuDividedItem from './SideMenu/SideMenuDividedItem';
const ERRORS_ROUTE = errorsRoute();
@ -39,44 +35,26 @@ function getStatusLabel(status) {
@withSiteIdRouter
@connect(state => ({
list: state.getIn([ "errors", "list" ]),
status: state.getIn([ "errors", "options", "status" ]),
}), {
fetchBookmarks,
applyFilter,
fetchSlackList,
editOptions,
})
@withPageTitle("Errors - OpenReplay")
export default class Errors extends React.PureComponent {
state = {
status: UNRESOLVED,
bookmarksActive: false,
currentList: this.props.list.filter(e => e.status === UNRESOLVED),
filter: '',
constructor(props) {
super(props)
this.state = {
filter: '',
}
}
componentDidMount() {
this.props.fetchSlackList(); // Delete after implementing cache
}
onFilterChange = ({ target: { value } }) => this.setState({ filter: value })
componentDidUpdate(prevProps, prevState) {
const { bookmarksActive, status, filter } = this.state;
const { list } = this.props;
if (prevProps.list !== list
|| prevState.status !== status
|| prevState.bookmarksActive !== bookmarksActive
|| prevState.filter !== filter) {
const unfiltered = bookmarksActive
? list
: list.filter(e => e.status === status);
const filterRE = getRE(filter);
this.setState({
currentList: unfiltered
.filter(e => filterRE.test(e.name) || filterRE.test(e.message)),
})
}
}
ensureErrorsPage() {
const { history } = this.props;
if (!isRoute(ERRORS_ROUTE, history.location.pathname)) {
@ -85,22 +63,11 @@ export default class Errors extends React.PureComponent {
}
onStatusItemClick = ({ key }) => {
if (this.state.bookmarksActive) {
this.props.applyFilter();
}
this.setState({
status: key,
bookmarksActive: false,
});
this.ensureErrorsPage();
this.props.editOptions({ status: key });
}
onBookmarksClick = () => {
this.setState({
bookmarksActive: true,
});
this.props.fetchBookmarks();
this.ensureErrorsPage();
this.props.editOptions({ status: BOOKMARK });
}
@ -110,12 +77,14 @@ export default class Errors extends React.PureComponent {
match: {
params: { errorId }
},
status,
list,
history,
} = this.props;
const { status, bookmarksActive, currentList } = this.state;
return (
<div className="page-margin container-90" >
<div className="side-menu">
<div className={cn("side-menu", {'disabled' : !isRoute(ERRORS_ROUTE, history.location.pathname)})}>
<SideMenuSection
title="Errors"
onItemClick={this.onStatusItemClick}
@ -137,14 +106,14 @@ export default class Errors extends React.PureComponent {
icon: "ban",
label: getStatusLabel(IGNORED),
active: status === IGNORED,
}
]}
/>
<SideMenuDividedItem
className="mt-3 mb-4"
iconName="star"
title="Bookmarks"
active={ bookmarksActive }
active={ status === BOOKMARK }
onClick={ this.onBookmarksClick }
/>
</div>
@ -154,8 +123,8 @@ export default class Errors extends React.PureComponent {
<>
<div className="mb-5 flex">
<Header
text={ bookmarksActive ? "Bookmarks" : getStatusLabel(status) }
count={ currentList.size }
text={ status === BOOKMARK ? "Bookmarks" : getStatusLabel(status) }
count={ list.size }
/>
<div className="ml-3 flex items-center">
<span className="mr-2 color-gray-medium">Seen in</span>
@ -164,12 +133,11 @@ export default class Errors extends React.PureComponent {
</div>
<List
status={ status }
list={ currentList }
onFilterChange={this.onFilterChange}
list={ list }
/>
</>
:
<ErrorInfo errorId={ errorId } list={ currentList } />
<ErrorInfo errorId={ errorId } list={ list } />
}
</div>
</div>

View file

@ -1,53 +1,62 @@
import cn from 'classnames';
import { connect } from 'react-redux';
import { Set, List as ImmutableList } from "immutable";
import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain } from 'UI';
import { merge, resolve, unresolve, ignore, updateCurrentPage } from "Duck/errors";
import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain, Pagination } from 'UI';
import { merge, resolve, unresolve, ignore, updateCurrentPage, editOptions } from "Duck/errors";
import { applyFilter } from 'Duck/filters';
import { IGNORED, RESOLVED, UNRESOLVED } from 'Types/errorInfo';
import SortDropdown from 'Components/BugFinder/Filters/SortDropdown';
import Divider from 'Components/Errors/ui/Divider';
import ListItem from './ListItem/ListItem';
import { debounce } from 'App/utils';
const PER_PAGE = 5;
const DEFAULT_SORT = 'lastOccurrence';
const DEFAULT_ORDER = 'desc';
const PER_PAGE = 10;
const sortOptionsMap = {
'lastOccurrence-desc': 'Last Occurrence',
'firstOccurrence-desc': 'First Occurrence',
'sessions-asc': 'Sessions Ascending',
'sessions-desc': 'Sessions Descending',
'users-asc': 'Users Ascending',
'users-desc': 'Users Descending',
'occurrence-desc': 'Last Occurrence',
'occurrence-asc': 'First Occurrence', // keys must be unique; ascending occurrence stands in for "first occurrence"
'sessions-asc': 'Sessions Ascending',
'sessions-desc': 'Sessions Descending',
'users-asc': 'Users Ascending',
'users-desc': 'Users Descending',
};
const sortOptions = Object.entries(sortOptionsMap)
.map(([ value, text ]) => ({ value, text }));
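// option values encode "<sortField>-<order>" and are split on "-" in writeOption below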
@connect(state => ({
loading: state.getIn([ "errors", "loading" ]),
resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) ||
state.getIn(["errors", "unresolve", "loading"]),
ignoreLoading: state.getIn([ "errors", "ignore", "loading" ]),
mergeLoading: state.getIn([ "errors", "merge", "loading" ]),
currentPage: state.getIn(["errors", "currentPage"]),
total: state.getIn([ 'errors', 'totalCount' ]),
sort: state.getIn([ 'errors', 'options', 'sort' ]),
order: state.getIn([ 'errors', 'options', 'order' ]),
query: state.getIn([ "errors", "options", "query" ]),
}), {
merge,
resolve,
unresolve,
ignore,
applyFilter,
updateCurrentPage,
editOptions,
})
export default class List extends React.PureComponent {
state = {
checkedAll: false,
checkedIds: Set(),
sort: {}
constructor(props) {
super(props)
this.state = {
checkedAll: false,
checkedIds: Set(),
query: props.query,
}
this.debounceFetch = debounce(this.props.editOptions, 1000);
}
componentDidMount() {
this.props.applyFilter({ sort: DEFAULT_SORT, order: DEFAULT_ORDER, events: ImmutableList(), filters: ImmutableList() });
if (this.props.list.size === 0) {
this.props.applyFilter({ });
}
}
check = ({ errorId }) => {
@ -111,8 +120,14 @@ export default class List extends React.PureComponent {
writeOption = (e, { name, value }) => {
const [ sort, order ] = value.split('-');
const sign = order === 'desc' ? -1 : 1;
this.setState({ sort: { sort, order }})
if (name === 'sort') {
this.props.editOptions({ sort, order });
}
}
onQueryChange = (e, { value }) => {
this.setState({ query: value });
this.debounceFetch({ query: value });
}
render() {
@ -123,19 +138,18 @@ export default class List extends React.PureComponent {
ignoreLoading,
resolveToggleLoading,
mergeLoading,
onFilterChange,
currentPage,
total,
sort,
order,
} = this.props;
const {
checkedAll,
checkedIds,
sort
query,
} = this.state;
const someLoading = loading || ignoreLoading || resolveToggleLoading || mergeLoading;
const currentCheckedIds = this.currentCheckedIds();
const displayedCount = Math.min(currentPage * PER_PAGE, list.size);
let _list = sort.sort ? list.sortBy(i => i[sort.sort]) : list;
_list = sort.order === 'desc' ? _list.reverse() : _list;
return (
<div className="bg-white p-5 border-radius-3 thin-gray-border">
@ -182,33 +196,35 @@ export default class List extends React.PureComponent {
}
</div>
<div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium">Sort By</span>
<span className="mr-2 color-gray-medium">Sort By</span>
<DropdownPlain
name="type"
options={ sortOptions }
onChange={ this.writeOption }
/>
<Input
defaultValue={ `${sort}-${order}` }
name="sort"
options={ sortOptions }
onChange={ this.writeOption }
/>
<Input
style={{ width: '350px'}}
className="input-small ml-3"
placeholder="Filter by Name or Message"
icon="search"
iconPosition="left"
name="filter"
onChange={ onFilterChange }
/>
</div>
</div>
<Divider />
<NoContent
title="No Errors Found!"
subtext="Please try to change your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
>
<Loader loading={ loading }>
{ _list.take(displayedCount).map(e =>
<>
className="input-small ml-3"
placeholder="Filter by Name or Message"
icon="search"
iconPosition="left"
name="filter"
onChange={ this.onQueryChange }
value={query}
/>
</div>
</div>
<Divider />
<NoContent
title="No Errors Found!"
subtext="Please try to change your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
>
<Loader loading={ loading }>
{ list.map(e =>
<div key={e.errorId}>
<ListItem
disabled={someLoading}
key={e.errorId}
@ -217,16 +233,19 @@ export default class List extends React.PureComponent {
onCheck={ this.check }
/>
<Divider/>
</>
</div>
)}
<LoadMoreButton
className="mt-3"
displayedCount={displayedCount}
totalCount={list.size}
onClick={this.addPage}
/>
</Loader>
</NoContent>
<div className="w-full flex items-center justify-center mt-4">
<Pagination
page={currentPage}
totalPages={Math.ceil(total / PER_PAGE)}
onPageChange={(page) => this.props.updateCurrentPage(page)}
limit={PER_PAGE}
debounceRequest={500}
/>
</div>
</Loader>
</NoContent>
</div>
);
}

View file

@ -6,9 +6,19 @@ import { connect } from 'react-redux';
import { setActiveStages } from 'Duck/funnels';
import { Styles } from '../../Dashboard/Widgets/common';
import { numberWithCommas } from 'App/utils'
import { truncate } from 'App/utils'
const MIN_BAR_HEIGHT = 20;
function CustomTick(props) {
const { x, y, payload } = props;
return (
<g transform={`translate(${x},${y})`}>
<text x={0} y={0} dy={16} fontSize={12} textAnchor="middle" fill="#666">{payload.value}</text>
</g>
);
}
function FunnelGraph(props) {
const { data, activeStages, funnelId, liveFilters } = props;
const [activeIndex, setActiveIndex] = useState(activeStages)
@ -118,13 +128,29 @@ function FunnelGraph(props) {
)
}
const CustomTooltip = ({ active, payload, msg = '' }) => {
const CustomTooltip = (props) => {
const { payload } = props;
if (!payload || payload.length === 0) return null;
const { value, headerText } = payload[0].payload;
// const value = payload[0].payload.value;
if (!value) return null;
return (
<div className="rounded border bg-white p-2">
<p className="text-sm">{msg}</p>
<div className="rounded border bg-white p-2">
<div>{headerText}</div>
{value.map(i => (
<div className="text-sm ml-2">{truncate(i, 30)}</div>
))}
</div>
);
)
};
// const CustomTooltip = ({ active, payload, msg = '' }) => {
// return (
// <div className="rounded border bg-white p-2">
// <p className="text-sm">{msg}</p>
// </div>
// );
// };
const TEMP = {}
@ -152,7 +178,9 @@ function FunnelGraph(props) {
background={'transparent'}
>
<CartesianGrid strokeDasharray="1 3" stroke="#BBB" vertical={false} />
{activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />}
{/* {activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} */}
<Tooltip cursor={{ fill: 'transparent' }} content={CustomTooltip} />
<Bar
dataKey="sessionsCount"
onClick={handleClick}
@ -210,7 +238,8 @@ function FunnelGraph(props) {
dataKey="label"
strokeWidth={0}
interval={0}
tick ={{ fill: '#666', fontSize: 12 }}
// tick ={{ fill: '#666', fontSize: 12 }}
tick={<CustomTick />}
xAxisId={0}
/>
{/* <XAxis

View file

@ -1,7 +1,7 @@
import React, { useEffect, useState } from 'react';
import { Icon, BackLink, IconButton, Dropdown, Popup, TextEllipsis, Button } from 'UI';
import { remove as deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered } from 'Duck/funnels';
import { editFilter, addFilter } from 'Duck/funnels';
import { editFilter, refresh, addFilter } from 'Duck/funnels';
import DateRange from 'Shared/DateRange';
import { connect } from 'react-redux';
import { confirm } from 'UI/Confirmation';
@ -19,16 +19,11 @@ const Info = ({ label = '', value = '', className = 'mx-4' }) => {
const FunnelHeader = (props) => {
const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props;
const [showSaveModal, setShowSaveModal] = useState(false)
const writeOption = (e, { name, value }) => {
props.fetch(value).then(() => {
props.fetchInsights(value, {})
props.fetchIssuesFiltered(value, {})
props.fetchSessionsFiltered(value, {})
props.redirect(value)
})
props.redirect(value)
props.fetch(value).then(() => props.refresh(value))
}
const deleteFunnel = async (e, funnel) => {
@ -45,11 +40,12 @@ const FunnelHeader = (props) => {
}
const onDateChange = (e) => {
props.editFilter(e, funnel.funnelId);
props.editFilter(e, funnelId);
}
const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS();
const selectedFunnel = funnels.filter(i => i.funnelId === parseInt(funnelId)).first() || {};
const eventsCount = funnel.filter.filters.filter(i => i.isEvent).size;
return (
<div>
@ -76,7 +72,7 @@ const FunnelHeader = (props) => {
selectOnBlur={false}
icon={ <Icon name="chevron-down" color="gray-dark" size="14" className={stl.dropdownIcon} /> }
/>
<Info label="Events" value={funnel.filter.filters.size} />
<Info label="Events" value={eventsCount} />
<span>-</span>
<Button plain onClick={props.toggleFilters}>{ showFilters ? 'HIDE' : 'EDIT FUNNEL' }</Button>
<Info label="Sessions" value={insights.sessionsCount} />
@ -114,4 +110,4 @@ const FunnelHeader = (props) => {
export default connect(state => ({
funnel: state.getIn([ 'funnels', 'instance' ]),
}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered })(FunnelHeader)
}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader)

View file

@ -9,6 +9,7 @@ import Controls from './Controls';
import Overlay from './Overlay';
import stl from './player.css';
import EventsToggleButton from '../../Session/EventsToggleButton';
import { updateLastPlayedSession } from 'Duck/sessions';
@connectPlayer(state => ({
live: state.live,
@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton';
return {
fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]),
nextId: state.getIn([ 'sessions', 'nextId' ]),
sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]),
closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])),
}
}, {
hideTargetDefiner,
fullscreenOff,
updateLastPlayedSession,
})
export default class Player extends React.PureComponent {
screenWrapper = React.createRef();
componentDidMount() {
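// remember this session as the last one played so session lists can badge it "LAST PLAYED"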
this.props.updateLastPlayedSession(this.props.sessionId);
if (this.props.closedLive) return;
const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture

View file

@ -1,7 +1,7 @@
import React, { useEffect } from 'react';
import { fetchLiveList } from 'Duck/sessions';
import { connect } from 'react-redux';
import { NoContent, Loader, LoadMoreButton } from 'UI';
import { NoContent, Loader, LoadMoreButton, Pagination } from 'UI';
import { List, Map } from 'immutable';
import SessionItem from 'Shared/SessionItem';
import withPermissions from 'HOCs/withPermissions'
@ -12,11 +12,11 @@ import { addFilterByKeyAndValue, updateCurrentPage, updateSort } from 'Duck/live
import DropdownPlain from 'Shared/DropdownPlain';
import SortOrderButton from 'Shared/SortOrderButton';
import { TimezoneDropdown } from 'UI';
import { capitalize } from 'App/utils';
import { capitalize, sliceListPerPage } from 'App/utils';
import LiveSessionReloadButton from 'Shared/LiveSessionReloadButton';
const AUTOREFRESH_INTERVAL = .5 * 60 * 1000
const PER_PAGE = 20;
const PER_PAGE = 10;
interface Props {
loading: Boolean,
@ -42,9 +42,8 @@ function LiveSessionList(props: Props) {
text: capitalize(i), value: i
})).toJS();
const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size);
const addPage = () => props.updateCurrentPage(props.currentPage + 1)
// const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size);
// const addPage = () => props.updateCurrentPage(props.currentPage + 1)
useEffect(() => {
if (filters.size === 0) {
@ -135,6 +134,7 @@ function LiveSessionList(props: Props) {
<SortOrderButton onChange={(state) => props.updateSort({ order: state })} sortOrder={sort.order} />
</div>
</div>
<NoContent
title={"No live sessions."}
subtext={
@ -147,9 +147,9 @@ function LiveSessionList(props: Props) {
show={ !loading && sessions && sessions.size === 0}
>
<Loader loading={ loading }>
{sessions && sessions.sortBy(i => i.metadata[sort.field]).update(list => {
{sessions && sliceListPerPage(sessions.sortBy(i => i.metadata[sort.field]).update(list => {
return sort.order === 'desc' ? list.reverse() : list;
}).take(displayedCount).map(session => (
}), currentPage - 1).map(session => (
<SessionItem
key={ session.sessionId }
session={ session }
@ -160,12 +160,14 @@ function LiveSessionList(props: Props) {
/>
))}
<LoadMoreButton
className="my-6"
displayedCount={displayedCount}
totalCount={sessions.size}
onClick={addPage}
<div className="w-full flex items-center justify-center py-6">
<Pagination
page={currentPage}
totalPages={Math.ceil(sessions.size / PER_PAGE)}
onPageChange={(page) => props.updateCurrentPage(page)}
limit={PER_PAGE}
/>
</div>
</Loader>
</NoContent>
</div>

View file

@ -3,29 +3,25 @@ import cn from 'classnames';
import {
Link,
Icon,
OsIcon,
BrowserIcon,
CountryFlag,
Avatar,
TextEllipsis,
Label,
} from 'UI';
import { deviceTypeIcon } from 'App/iconNames';
import { toggleFavorite, setSessionPath } from 'Duck/sessions';
import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes';
import { durationFormatted, formatTimeOrDate } from 'App/date';
import stl from './sessionItem.css';
import LiveTag from 'Shared/LiveTag';
import Bookmark from 'Shared/Bookmark';
import Counter from './Counter'
import { withRouter } from 'react-router-dom';
import SessionMetaList from './SessionMetaList';
import ErrorBars from './ErrorBars';
import { assist as assistRoute, liveSession, isRoute } from "App/routes";
import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes";
import { capitalize } from 'App/utils';
const ASSIST_ROUTE = assistRoute();
const ASSIST_LIVE_SESSION = liveSession()
const SESSIONS_ROUTE = sessionsRoute();
// const Label = ({ label = '', color = 'color-gray-medium'}) => (
// <div className={ cn('font-light text-sm', color)}>{label}</div>
@ -69,10 +65,13 @@ export default class SessionItem extends React.PureComponent {
disableUser = false,
metaList = [],
showActive = false,
lastPlayedSessionId,
} = this.props;
const formattedDuration = durationFormatted(duration);
const hasUserId = userId || userAnonymousId;
const isSessions = isRoute(SESSIONS_ROUTE, this.props.location.pathname);
const isAssist = isRoute(ASSIST_ROUTE, this.props.location.pathname) || isRoute(ASSIST_LIVE_SESSION, this.props.location.pathname);
const isLastPlayed = lastPlayedSessionId === sessionId;
const _metaList = Object.keys(metadata).filter(i => metaList.includes(i)).map(key => {
const value = metadata[key];
@ -125,7 +124,7 @@ export default class SessionItem extends React.PureComponent {
</span>
</div>
</div>
{ !isAssist && (
{ isSessions && (
<div style={{ width: "10%"}} className="self-center px-2 flex items-center">
<ErrorBars count={issueTypes.length} />
</div>
@ -139,6 +138,15 @@ export default class SessionItem extends React.PureComponent {
</Label>
)}
<div className={ stl.playLink } id="play-button" data-viewed={ viewed }>
{ isSessions && (
<div className="mr-4 flex-shrink-0 w-24">
{ isLastPlayed && (
<Label className="bg-gray-lightest p-1 px-2 rounded-lg">
<span className="color-gray-medium text-xs" style={{ whiteSpace: 'nowrap'}}>LAST PLAYED</span>
</Label>
)}
</div>
)}
<Link to={ isAssist ? liveSessionRoute(sessionId) : sessionRoute(sessionId) }>
<Icon name={ !viewed && !isAssist ? 'play-fill' : 'play-circle-light' } size="42" color={isAssist ? "tealx" : "teal"} />
</Link>

View file

@ -21,7 +21,7 @@ function DropdownPlain({ name, label, options, onChange, defaultValue, wrapperSt
options={ options }
onChange={ onChange }
defaultValue={ defaultValue || options[ 0 ].value }
icon={null}
// icon={null}
disabled={disabled}
icon={ <Icon name="chevron-down" color="gray-dark" size="14" className={stl.dropdownIcon} /> }
/>

View file

@ -0,0 +1,77 @@
import React from 'react'
import { Icon } from 'UI'
import cn from 'classnames'
import { debounce } from 'App/utils';
import { Tooltip } from 'react-tippy';
interface Props {
page: number
totalPages: number
onPageChange: (page: number) => void
limit?: number
debounceRequest?: number
}
export default function Pagination(props: Props) {
const { page, totalPages, onPageChange, limit = 5, debounceRequest = 0 } = props;
const [currentPage, setCurrentPage] = React.useState(page);
React.useEffect(
() => setCurrentPage(page),
[page],
);
const debounceChange = React.useCallback(debounce(onPageChange, debounceRequest), []);
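// local page state updates immediately for responsiveness; the (optionally debounced) callback triggers the actual fetch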
const changePage = (page: number) => {
if (page > 0 && page <= totalPages) {
setCurrentPage(page);
debounceChange(page);
}
}
const isFirstPage = currentPage === 1;
const isLastPage = currentPage === totalPages;
return (
<div className="flex items-center">
<Tooltip
arrow
sticky
title="Previous Page"
trigger="mouseenter"
hideOnClick={true}
>
<button
className={cn("py-2 px-3", { "opacity-50 cursor-default": isFirstPage })}
disabled={isFirstPage}
onClick={() => changePage(currentPage - 1)}
>
<Icon name="chevron-left" size="18" color={isFirstPage ? 'gray-medium' : 'teal'} />
</button>
</Tooltip>
<span className="mr-2 color-gray-medium">Page</span>
<input
type="number"
className={cn("py-1 px-2 bg-white border border-gray-light rounded w-16", { "opacity-50 cursor-default": totalPages === 1 })}
value={currentPage}
min={1}
max={totalPages}
onChange={(e) => changePage(parseInt(e.target.value))}
/>
<span className="mx-3 color-gray-medium">of</span>
<span>{totalPages}</span>
<Tooltip
arrow
sticky
title="Next Page"
trigger="mouseenter"
hideOnClick={true}
>
<button
className={cn("py-2 px-3", { "opacity-50 cursor-default": isLastPage })}
disabled={isLastPage}
onClick={() => changePage(currentPage + 1)}
>
<Icon name="chevron-right" size="18" color={isLastPage ? 'gray-medium' : 'teal'} />
</button>
</Tooltip>
</div>
)
}
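For reference, a minimal usage sketch of this component, mirroring the List and SessionList call sites above (total, PER_PAGE and updateCurrentPage are the consumer's own state and actions, hypothetical here):
// Hypothetical consumer: 10 rows per page, debounced server-side paging.
<Pagination
  page={currentPage}
  totalPages={Math.ceil(total / PER_PAGE)}
  onPageChange={(page) => props.updateCurrentPage(page)}
  limit={PER_PAGE}
  debounceRequest={500}
/>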

View file

@ -0,0 +1 @@
export { default } from './Pagination';

View file

@ -55,5 +55,6 @@ export { default as HighlightCode } from './HighlightCode';
export { default as NoPermission } from './NoPermission';
export { default as NoSessionPermission } from './NoSessionPermission';
export { default as HelpText } from './HelpText';
export { default as Pagination } from './Pagination';
export { Input, Modal, Form, Message, Card } from 'semantic-ui-react';

View file

@ -1,4 +1,4 @@
import { FilterKey } from 'Types/filter/filterType';
import { FilterKey, IssueType } from 'Types/filter/filterType';
export const options = [
{ key: 'on', text: 'on', value: 'on' },
@ -93,18 +93,18 @@ export const methodOptions = [
]
export const issueOptions = [
{ text: 'Click Rage', value: 'click_rage' },
{ text: 'Dead Click', value: 'dead_click' },
{ text: 'Excessive Scrolling', value: 'excessive_scrolling' },
{ text: 'Bad Request', value: 'bad_request' },
{ text: 'Missing Resource', value: 'missing_resource' },
{ text: 'Memory', value: 'memory' },
{ text: 'CPU', value: 'cpu' },
{ text: 'Slow Resource', value: 'slow_resource' },
{ text: 'Slow Page Load', value: 'slow_page_load' },
{ text: 'Crash', value: 'crash' },
{ text: 'Custom', value: 'custom' },
{ text: 'JS Exception', value: 'js_exception' },
{ text: 'Click Rage', value: IssueType.CLICK_RAGE },
{ text: 'Dead Click', value: IssueType.DEAD_CLICK },
{ text: 'Excessive Scrolling', value: IssueType.EXCESSIVE_SCROLLING },
{ text: 'Bad Request', value: IssueType.BAD_REQUEST },
{ text: 'Missing Resource', value: IssueType.MISSING_RESOURCE },
{ text: 'Memory', value: IssueType.MEMORY },
{ text: 'CPU', value: IssueType.CPU },
{ text: 'Slow Resource', value: IssueType.SLOW_RESOURCE },
{ text: 'Slow Page Load', value: IssueType.SLOW_PAGE_LOAD },
{ text: 'Crash', value: IssueType.CRASH },
{ text: 'Custom', value: IssueType.CUSTOM },
{ text: 'Error', value: IssueType.JS_EXCEPTION },
]
export default {

View file

@ -1,13 +1,18 @@
import { List, Map } from 'immutable';
import { clean as cleanParams } from 'App/api_client';
import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED } from 'Types/errorInfo';
import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED, BOOKMARK } from 'Types/errorInfo';
import { createFetch, fetchListType, fetchType } from './funcTools/crud';
import { createRequestReducer, ROOT_KEY } from './funcTools/request';
import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools';
import { reduceThenFetchResource } from './search'
const name = "error";
const idKey = "errorId";
const PER_PAGE = 10;
const DEFAULT_SORT = 'occurrence';
const DEFAULT_ORDER = 'desc';
const EDIT_OPTIONS = `${name}/EDIT_OPTIONS`;
const FETCH_LIST = fetchListType(name);
const FETCH = fetchType(name);
const FETCH_NEW_ERRORS_COUNT = fetchType('errors/FETCH_NEW_ERRORS_COUNT');
@ -18,6 +23,7 @@ const MERGE = "errors/MERGE";
const TOGGLE_FAVORITE = "errors/TOGGLE_FAVORITE";
const FETCH_TRACE = "errors/FETCH_TRACE";
const UPDATE_CURRENT_PAGE = "errors/UPDATE_CURRENT_PAGE";
const UPDATE_KEY = `${name}/UPDATE_KEY`;
function chartWrapper(chart = []) {
return chart.map(point => ({ ...point, count: Math.max(point.count, 0) }));
@ -35,13 +41,23 @@ const initialState = Map({
instanceTrace: List(),
stats: Map(),
sourcemapUploaded: true,
currentPage: 1,
options: Map({
sort: DEFAULT_SORT,
order: DEFAULT_ORDER,
status: UNRESOLVED,
query: '',
}),
// sort: DEFAULT_SORT,
// order: DEFAULT_ORDER,
});
function reducer(state = initialState, action = {}) {
let updError;
switch (action.type) {
case EDIT_OPTIONS:
return state.mergeIn(["options"], action.instance);
case success(FETCH):
return state.set("instance", ErrorInfo(action.data));
case success(FETCH_TRACE):
@ -69,8 +85,10 @@ function reducer(state = initialState, action = {}) {
return state.update("list", list => list.filter(e => !ids.includes(e.errorId)));
case success(FETCH_NEW_ERRORS_COUNT):
return state.set('stats', action.data);
case UPDATE_KEY:
return state.set(action.key, action.value);
case UPDATE_CURRENT_PAGE:
return state.set('currentPage', action.page);
}
return state;
}
@ -106,14 +124,32 @@ export function fetchTrace(id) {
}
}
export function fetchList(params = {}, clear = false) {
return {
types: array(FETCH_LIST),
call: client => client.post('/errors/search', params),
clear,
params: cleanParams(params),
};
}
export const fetchList = (params = {}, clear = false) => (dispatch, getState) => {
params.page = getState().getIn(['errors', 'currentPage']);
params.limit = PER_PAGE;
const options = getState().getIn(['errors', 'options']).toJS();
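// BOOKMARK is a client-only pseudo-status: send bookmarked=true with status "all" to the search endpoint instead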
if (options.status === BOOKMARK) {
options.bookmarked = true;
options.status = 'all';
}
return dispatch({
types: array(FETCH_LIST),
call: client => client.post('/errors/search', { ...params, ...options }),
clear,
params: cleanParams(params),
});
};
// export function fetchList(params = {}, clear = false) {
// return {
// types: array(FETCH_LIST),
// call: client => client.post('/errors/search', params),
// clear,
// params: cleanParams(params),
// };
// }
export function fetchBookmarks() {
return {
@ -169,9 +205,12 @@ export function fetchNewErrorsCount(params = {}) {
}
}
export function updateCurrentPage(page) {
return {
type: 'errors/UPDATE_CURRENT_PAGE',
export const updateCurrentPage = reduceThenFetchResource((page) => ({
type: UPDATE_CURRENT_PAGE,
page,
};
}
}));
export const editOptions = reduceThenFetchResource((instance) => ({
type: EDIT_OPTIONS,
instance
}));

View file

@ -7,7 +7,7 @@ import SavedFilter from 'Types/filter/savedFilter';
import { errors as errorsRoute, isRoute } from "App/routes";
import { fetchList as fetchSessionList } from './sessions';
import { fetchList as fetchErrorsList } from './errors';
import { FilterCategory, FilterKey } from 'Types/filter/filterType';
import { FilterCategory, FilterKey, IssueType } from 'Types/filter/filterType';
import { filtersMap, liveFiltersMap, generateFilterOptions, generateLiveFilterOptions } from 'Types/filter/newFilter';
const ERRORS_ROUTE = errorsRoute();
@ -28,6 +28,8 @@ const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`;
const UPDATE = `${name}/UPDATE`;
const APPLY = `${name}/APPLY`;
const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`;
const UPDATE_CURRENT_PAGE = `${name}/UPDATE_CURRENT_PAGE`;
const SET_ACTIVE_TAB = `${name}/SET_ACTIVE_TAB`;
const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS';
@ -49,6 +51,8 @@ const initialState = Map({
instance: new Filter({ filters: [] }),
savedSearch: new SavedFilter({}),
filterSearchList: {},
currentPage: 1,
activeTab: {name: 'All', type: 'all' },
});
// Metric - Series - [] - filters
@ -62,7 +66,7 @@ function reducer(state = initialState, action = {}) {
case APPLY:
return action.fromUrl
? state.set('instance', Filter(action.filter))
: state.mergeIn(['instance'], action.filter);
: state.mergeIn(['instance'], action.filter).set('currentPage', 1);
case success(FETCH):
return state.set("instance", action.data);
case success(FETCH_LIST):
@ -83,6 +87,10 @@ function reducer(state = initialState, action = {}) {
return state.set('savedSearch', action.filter);
case EDIT_SAVED_SEARCH:
return state.mergeIn([ 'savedSearch' ], action.instance);
case UPDATE_CURRENT_PAGE:
return state.set('currentPage', action.page);
case SET_ACTIVE_TAB:
return state.set('activeTab', action.tab).set('currentPage', 1);
}
return state;
}
@ -118,10 +126,24 @@ export const filterMap = ({category, value, key, operator, sourceOperator, sourc
filters: filters ? filters.map(filterMap) : [],
});
const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
export const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
dispatch(actionCreator(...args));
const filter = getState().getIn([ 'search', 'instance']).toData();
const activeTab = getState().getIn([ 'search', 'activeTab']);
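// fold the active tab into the request: an issue tab becomes an ISSUE filter, the bookmark tab a bookmarked flag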
if (activeTab.type !== 'all' && activeTab.type !== 'bookmark') {
const tmpFilter = { ...filtersMap[FilterKey.ISSUE] }; // copy so the shared template in filtersMap is not mutated
tmpFilter.value = [activeTab.type]
filter.filters = filter.filters.concat(tmpFilter)
}
if (activeTab.type === 'bookmark') {
filter.bookmarked = true
}
filter.filters = filter.filters.map(filterMap);
filter.limit = 10;
filter.page = getState().getIn([ 'search', 'currentPage']);
return isRoute(ERRORS_ROUTE, window.location.pathname)
? dispatch(fetchErrorsList(filter))
@ -133,6 +155,11 @@ export const edit = reduceThenFetchResource((instance) => ({
instance,
}));
export const setActiveTab = reduceThenFetchResource((tab) => ({
type: SET_ACTIVE_TAB,
tab
}));
export const remove = (id) => (dispatch, getState) => {
return dispatch({
types: REMOVE.array,
@ -152,6 +179,11 @@ export const applyFilter = reduceThenFetchResource((filter, fromUrl=false) => ({
fromUrl,
}));
export const updateCurrentPage = reduceThenFetchResource((page) => ({
type: UPDATE_CURRENT_PAGE,
page,
}));
export const applySavedSearch = (filter) => (dispatch, getState) => {
dispatch(edit({ filters: filter ? filter.filter.filters : [] }));
return dispatch({

View file

@ -7,9 +7,9 @@ import withRequestState, { RequestTypes } from './requestStateCreator';
import { getRE } from 'App/utils';
import { LAST_7_DAYS } from 'Types/app/period';
import { getDateRangeFromValue } from 'App/dateRange';
const name = 'sessions';
const INIT = 'sessions/INIT';
const FETCH_LIST = new RequestTypes('sessions/FETCH_LIST');
const FETCH = new RequestTypes('sessions/FETCH');
const FETCH_FAVORITE_LIST = new RequestTypes('sessions/FETCH_FAVORITE_LIST');
@ -26,6 +26,7 @@ const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW';
const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG';
const SET_TIMELINE_POINTER = 'sessions/SET_TIMELINE_POINTER';
const SET_SESSION_PATH = 'sessions/SET_SESSION_PATH';
const LAST_PLAYED_SESSION_ID = `${name}/LAST_PLAYED_SESSION_ID`;
const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB';
@ -60,6 +61,7 @@ const initialState = Map({
funnelPage: Map(),
timelinePointer: null,
sessionPath: '',
lastPlayedSessionId: null,
});
const reducer = (state = initialState, action = {}) => {
@ -248,11 +250,21 @@ const reducer = (state = initialState, action = {}) => {
return state.set('timelinePointer', action.pointer);
case SET_SESSION_PATH:
return state.set('sessionPath', action.path);
case LAST_PLAYED_SESSION_ID:
return updateListItem(state, action.sessionId, { viewed: true }).set('lastPlayedSessionId', action.sessionId);
default:
return state;
}
};
function updateListItem(state, sourceSessionId, instance) {
const list = state.get('list');
const index = list.findIndex(({ sessionId }) => sessionId === sourceSessionId);
if (index === -1) return state;
return state.updateIn([ 'list', index ], session => session.merge(instance));
}
export default withRequestState({
_: [ FETCH, FETCH_LIST ],
fetchLiveListRequest: FETCH_LIVE_LIST,
@ -390,4 +402,11 @@ export function setSessionPath(path) {
type: SET_SESSION_PATH,
path
}
}
export function updateLastPlayedSession(sessionId) {
return {
type: LAST_PLAYED_SESSION_ID,
sessionId,
};
}

View file

@ -113,8 +113,15 @@ export default class DOMManager extends ListWalker<Message> {
logger.error("Node has no childNodes", this.nl[ parentID ]);
return;
}
if (this.nl[ id ] instanceof HTMLHtmlElement) {
// What if some exotic cases?
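// a document has a single <html> root, so swap it in for the existing last child instead of inserting a sibling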
this.nl[ parentID ].replaceChild(this.nl[ id ], childNodes[childNodes.length-1])
return
}
this.nl[ parentID ]
.insertBefore(this.nl[ id ], childNodes[ index ]);
.insertBefore(this.nl[ id ], childNodes[ index ])
}
private applyMessage = (msg: Message): void => {
@ -257,14 +264,14 @@ export default class DOMManager extends ListWalker<Message> {
case "create_i_frame_document":
node = this.nl[ msg.frameID ];
// console.log('ifr', msg, node)
if (node instanceof HTMLIFrameElement) {
doc = node.contentDocument;
if (!doc) {
logger.warn("No iframe doc", msg, node, node.contentDocument);
return;
}
this.nl[ msg.id ] = doc.documentElement
this.nl[ msg.id ] = doc
return;
} else if (node instanceof Element) { // shadow DOM
try {

View file

@ -62,7 +62,7 @@
.color-white { color: $white }
.color-borderColor { color: $borderColor }
/* color */
/* hover color */
.hover-main:hover { color: $main }
.hover-gray-light-shade:hover { color: $gray-light-shade }
.hover-gray-lightest:hover { color: $gray-lightest }
@ -92,3 +92,33 @@
.hover-pink:hover { color: $pink }
.hover-white:hover { color: $white }
.hover-borderColor:hover { color: $borderColor }
.border-main { border-color: $main }
.border-gray-light-shade { border-color: $gray-light-shade }
.border-gray-lightest { border-color: $gray-lightest }
.border-gray-light { border-color: $gray-light }
.border-gray-medium { border-color: $gray-medium }
.border-gray-dark { border-color: $gray-dark }
.border-gray-darkest { border-color: $gray-darkest }
.border-teal { border-color: $teal }
.border-teal-dark { border-color: $teal-dark }
.border-teal-light { border-color: $teal-light }
.border-tealx { border-color: $tealx }
.border-tealx-light { border-color: $tealx-light }
.border-tealx-light-border { border-color: $tealx-light-border }
.border-orange { border-color: $orange }
.border-yellow { border-color: $yellow }
.border-yellow2 { border-color: $yellow2 }
.border-orange-dark { border-color: $orange-dark }
.border-green { border-color: $green }
.border-green2 { border-color: $green2 }
.border-green-dark { border-color: $green-dark }
.border-red { border-color: $red }
.border-red2 { border-color: $red2 }
.border-blue { border-color: $blue }
.border-blue2 { border-color: $blue2 }
.border-active-blue { border-color: $active-blue }
.border-active-blue-border { border-color: $active-blue-border }
.border-pink { border-color: $pink }
.border-white { border-color: $white }
.border-borderColor { border-color: $borderColor }

View file

@ -1,4 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-chevron-double-left" viewBox="0 0 16 16">
<svg xmlns="http://www.w3.org/2000/svg" fill="currentColor" class="bi bi-chevron-double-left" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M8.354 1.646a.5.5 0 0 1 0 .708L2.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
<path fill-rule="evenodd" d="M12.354 1.646a.5.5 0 0 1 0 .708L6.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
</svg>


View file

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" class="bi bi-chevron-left" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
</svg>


View file

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" class="bi bi-chevron-right" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z"/>
</svg>


View file

@ -5,6 +5,7 @@ import Session from './session';
export const RESOLVED = "resolved";
export const UNRESOLVED = "unresolved";
export const IGNORED = "ignored";
export const BOOKMARK = "bookmark";
function getStck0InfoString(stack) {

View file

@ -8,6 +8,21 @@ export enum FilterCategory {
PERFORMANCE = "Performance",
};
export enum IssueType {
CLICK_RAGE = "click_rage",
DEAD_CLICK = "dead_click",
EXCESSIVE_SCROLLING = "excessive_scrolling",
BAD_REQUEST = "bad_request",
MISSING_RESOURCE = "missing_resource",
MEMORY = "memory",
CPU = "cpu",
SLOW_RESOURCE = "slow_resource",
SLOW_PAGE_LOAD = "slow_page_load",
CRASH = "crash",
CUSTOM = "custom",
JS_EXCEPTION = "js_exception",
}
export enum FilterType {
STRING = "STRING",
ISSUE = "ISSUE",

View file

@ -14,7 +14,10 @@ const getRedableName = ({ type, value, operator }) => {
break;
case "INPUT":
str = 'Entered';
break;
case "CUSTOM":
str = 'Custom Event';
break;
}
return `${str} ${operator}`;
@ -52,7 +55,7 @@ export default Record({
},
fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => {
let _stages = stages.map((stage, index) => {
// stage.label = getRedableName(stage.type, stage.value);
stage.headerText = getRedableName(stage.type, stage.value);
stage.label = `Step ${index + 1}`;
return stage;
});
@ -73,7 +76,7 @@ export default Record({
...rest,
stages: _stages.length > 0 ? _stages.map((stage, index) => {
if (!stage) return;
// stage.label = getRedableName(stage);
stage.headerText = getRedableName(stage);
stage.label = `Step ${index + 1}`;
return stage;
}) : [],

View file

@ -232,4 +232,10 @@ export const isGreaterOrEqualVersion = (version, compareTo) => {
const [major, minor, patch] = version.split("-")[0].split('.').map(Number);
const [majorC, minorC, patchC] = compareTo.split("-")[0].split('.').map(Number);
return (major > majorC) || (major === majorC && minor > minorC) || (major === majorC && minor === minorC && patch >= patchC);
}
export const sliceListPerPage = (list, page, perPage = 10) => {
const start = page * perPage;
const end = start + perPage;
return list.slice(start, end);
}
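A small usage sketch; note the page argument is zero-based, which is why the call sites above pass currentPage - 1 (the list contents here are hypothetical):
// Hypothetical Immutable.List of 25 sessions with the default perPage of 10:
sliceListPerPage(sessions, 0)                          // sessions 0..9
sliceListPerPage(sessions, 2)                          // sessions 20..24
sliceListPerPage(sessions, currentPage - 1, PER_PAGE)  // as in LiveSessionList above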

View file

@ -12,7 +12,9 @@ ${ colors.map(color => `.fill-${ color } { fill: $${ color } }`).join('\n') }
/* color */
${ colors.map(color => `.color-${ color } { color: $${ color } }`).join('\n') }
/* color */
/* hover color */
${ colors.map(color => `.hover-${ color }:hover { color: $${ color } }`).join('\n') }
${ colors.map(color => `.border-${ color } { border-color: $${ color } }`).join('\n') }
`)

View file

@ -0,0 +1,25 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';

View file

@ -900,6 +900,23 @@ $$
CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
CREATE TYPE job_action AS ENUM ('delete_user_data');
CREATE TABLE jobs

View file

@ -19,7 +19,7 @@ spec:
- port: {{ .port }}
targetPort: {{ .targetPort }}
protocol: TCP
name: {{ .targetPort }}
name: {{ .name }}
{{- end }}
selector:
{{- include "nginx-ingress.selectorLabels" . | nindent 4 }}

View file

@ -42,8 +42,10 @@ service:
ports:
- port: 80
targetPort: http
name: http
- port: 443
targetPort: https
name: https
ingress:
enabled: false

View file

@ -100,7 +100,7 @@ utilities:
env:
debug: 0
uws: false
cluster: false
redis: false
# If you want to override something
# chartname:

View file

@ -1,4 +1,5 @@
node_modules
npm-debug.log
lib
cjs
.cache

View file

@ -1,18 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tracker_1 = require("@openreplay/tracker/cjs");
function default_1() {
return (app) => {
if (app === null) {
return (name) => (fn, thisArg) => thisArg === undefined ? fn : fn.bind(thisArg);
}
return (name) => (fn, thisArg) => (...args) => {
const startTime = performance.now();
const result = thisArg === undefined ? fn.apply(this, args) : fn.apply(thisArg, args);
const duration = performance.now() - startTime;
app.send(tracker_1.Messages.Profiler(name, duration, args.map(String).join(', '), String(result)));
return result;
};
};
}
exports.default = default_1;

View file

@ -1 +0,0 @@
{ "type": "commonjs" }

File diff suppressed because it is too large

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-redux",
"description": "Tracker plugin for Redux state recording",
"version": "3.4.8",
"version": "3.5.0",
"keywords": [
"redux",
"logging",
@ -23,11 +23,11 @@
},
"dependencies": {},
"peerDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"redux": "^4.0.0"
},
"devDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"prettier": "^1.18.2",
"replace-in-files-cli": "^1.0.0",
"typescript": "^4.6.0-dev.20211126"

View file

@ -23,8 +23,11 @@ export default function(opts: Partial<Options> = {}) {
return () => next => action => next(action);
}
const encoder = new Encoder(sha1, 50);
app.attachStopCallback(() => {
encoder.clear()
})
return ({ getState }) => next => action => {
if (!options.actionFilter(action)) {
if (!app.active() || !options.actionFilter(action)) {
return next(action);
}
const startTime = performance.now();

View file

@ -5,7 +5,7 @@
"alwaysStrict": true,
"target": "es6",
"module": "es6",
"moduleResolution": "nodenext",
"moduleResolution": "node",
"declaration": true,
"outDir": "./lib"
}

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "3.5.3",
"version": "3.5.4",
"keywords": [
"logging",
"replay"

View file

@ -41,32 +41,57 @@ export function isInstance<T extends WindowConstructor>(node: Node, constr: Cons
// @ts-ignore (for EI, Safary)
doc.parentWindow ||
doc.defaultView; // TODO: smart global typing for Window object
while(context.parent && context.parent !== context) {
while((context.parent || context.top) && context.parent !== context) {
// @ts-ignore
if (node instanceof context[constr.name]) {
return true
}
// @ts-ignore
context = context.parent
context = context.parent || context.top
}
// @ts-ignore
return node instanceof context[constr.name]
}
export function inDocument(node: Node): boolean {
// TODO: ensure 1. it works in all cases (iframes/detached nodes) and 2. it is the most efficient approach
export function inDocument(node: Node) {
const doc = node.ownerDocument
if (!doc) { return false }
if (doc.contains(node)) { return true }
let context: Window =
// @ts-ignore (for EI, Safary)
doc.parentWindow ||
doc.defaultView;
while(context.parent && context.parent !== context) {
if (context.document.contains(node)) {
if (!doc) { return true } // Document
let current: Node | null = node
while(current) {
if (current === doc) {
return true
} else if(isInstance(current, ShadowRoot)) {
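// a ShadowRoot is not in the parentNode chain; continue the walk from its host element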
current = current.host
} else {
current = current.parentNode
}
// @ts-ignore
context = context.parent
}
return false;
return false
}
// export function inDocument(node: Node): boolean {
// // @ts-ignore compatability
// if (node.getRootNode) {
// let root: Node
// while ((root = node.getRootNode()) !== node) {
// ////
// }
// }
// const doc = node.ownerDocument
// if (!doc) { return false }
// if (doc.contains(node)) { return true }
// let context: Window =
// // @ts-ignore (for EI, Safary)
// doc.parentWindow ||
// doc.defaultView;
// while(context.parent && context.parent !== context) {
// if (context.document.contains(node)) {
// return true
// }
// // @ts-ignore
// context = context.parent
// }
// return false;
// }

View file

@ -1,4 +1,3 @@
import { hasOpenreplayAttribute } from "../../utils.js";
import {
RemoveNodeAttribute,
SetNodeAttribute,
@ -59,9 +58,7 @@ export default abstract class Observer {
private readonly indexes: Array<number> = [];
private readonly attributesList: Array<Set<string> | undefined> = [];
private readonly textSet: Set<number> = new Set();
private readonly inUpperContext: boolean;
constructor(protected readonly app: App, protected readonly context: Window = window) {
this.inUpperContext = context.parent === context //TODO: get rid of context here
constructor(protected readonly app: App, protected readonly isTopContext = false) {
this.observer = new MutationObserver(
this.app.safe((mutations) => {
for (const mutation of mutations) {
@ -226,7 +223,7 @@ export default abstract class Observer {
// Disable parent check for the upper context HTMLHtmlElement, because it is root there... (before)
// TODO: get rid of "special" cases (there is an issue with CreateDocument altered behaviour though)
// TODO: Clean the logic (though now it works fine)
if (!isInstance(node, HTMLHtmlElement) || !this.inUpperContext) {
if (!isInstance(node, HTMLHtmlElement) || !this.isTopContext) {
if (parent === null) {
this.unbindNode(node);
return false;
@ -321,6 +318,8 @@ export default abstract class Observer {
for (let id = 0; id < this.recents.length; id++) {
// TODO: make things/logic nice here.
// commit required in any case if recents[id] true or false (in case of unbinding) or undefined (in case of attr change).
// Possible solution: separate new node commit (recents) and new attribute/move node commit
// Otherwise commitNode is called on each node, which might be a lot
if (!this.myNodes[id]) { continue }
this.commitNode(id);
if (this.recents[id] === true && (node = this.app.nodes.getNode(id))) {

View file

@ -6,7 +6,7 @@ import ShadowRootObserver from "./shadow_root_observer.js";
import { CreateDocument } from "../../../messages/index.js";
import App from "../index.js";
import { IN_BROWSER } from '../../utils.js'
import { IN_BROWSER, hasOpenreplayAttribute } from '../../utils.js'
export interface Options {
captureIFrames: boolean
@ -17,15 +17,16 @@ const attachShadowNativeFn = IN_BROWSER ? Element.prototype.attachShadow : ()=>n
export default class TopObserver extends Observer {
private readonly options: Options;
constructor(app: App, options: Partial<Options>) {
super(app);
super(app, true);
this.options = Object.assign({
captureIFrames: false
captureIFrames: true
}, options);
// IFrames
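// capture iframes by default unless flagged "obscured"; an explicit "capture" flag always opts in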
this.app.nodes.attachNodeCallback(node => {
if (isInstance(node, HTMLIFrameElement) &&
(this.options.captureIFrames || node.getAttribute("data-openreplay-capture"))
((this.options.captureIFrames && !hasOpenreplayAttribute(node, "obscured"))
|| hasOpenreplayAttribute(node, "capture"))
) {
this.handleIframe(node)
}
@ -42,26 +43,25 @@ export default class TopObserver extends Observer {
private iframeObservers: IFrameObserver[] = [];
private handleIframe(iframe: HTMLIFrameElement): void {
let context: Window | null = null
let doc: Document | null = null
const handle = this.app.safe(() => {
const id = this.app.nodes.getID(iframe)
if (id === undefined) { return } //log
if (iframe.contentWindow === context) { return } //Does this happen frequently?
context = iframe.contentWindow as Window | null;
if (!context) { return }
const observer = new IFrameObserver(this.app, context)
if (iframe.contentDocument === doc) { return } // How frequently can it happen?
doc = iframe.contentDocument
if (!doc || !iframe.contentWindow) { return }
const observer = new IFrameObserver(this.app)
this.iframeObservers.push(observer)
observer.observe(iframe)
})
this.app.attachEventListener(iframe, "load", handle)
iframe.addEventListener("load", handle) // why app.attachEventListener not working?
handle()
}
private shadowRootObservers: ShadowRootObserver[] = []
private handleShadowRoot(shRoot: ShadowRoot) {
const observer = new ShadowRootObserver(this.app, this.context)
const observer = new ShadowRootObserver(this.app)
this.shadowRootObservers.push(observer)
observer.observe(shRoot.host)
}
@ -81,9 +81,9 @@ export default class TopObserver extends Observer {
// the change in the re-player behaviour caused by CreateDocument message:
// the 0-node ("fRoot") will become #document rather than documentElement as it is now.
// Alternatively - observe(#document) then bindNode(documentElement)
this.observeRoot(this.context.document, () => {
this.observeRoot(window.document, () => {
this.app.send(new CreateDocument())
}, this.context.document.documentElement);
}, window.document.documentElement);
}
disconnect() {

View file

@@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
var wsRouter = express.Router();
const {extractPeerId} = require('./peerjs-server');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@@ -12,83 +12,152 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
let debug = process.env.debug === "1" || false;
const debug = process.env.debug === "1" || false;
const socketsList = function (req, res) {
const createSocketIOServer = function (server, prefix) {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
});
}
const extractUserIdFromRequest = function (req) {
if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
const respond = function (res, data) {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": data}));
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": liveSessions}));
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
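Since extractUserIdFromRequest reads the userId query parameter, callers can now narrow the listing to one user. A hedged client-side sketch (base URL, key, and a fetch-capable runtime are assumptions):

// Fetch live session ids, optionally filtered by user id.
async function fetchSocketsList(baseUrl: string, s3Key: string, userId?: string): Promise<Record<string, string[]>> {
  const url = new URL(`${baseUrl}/${s3Key}/sockets-list`)
  if (userId) url.searchParams.set('userId', userId) // read by extractUserIdFromRequest
  const res = await fetch(url.toString())
  const body = await res.json()
  return body.data // respond() wraps every payload as {"data": ...}
}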
const socketsListByProject = function (req, res) {
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": liveSessions}));
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
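All four routes answer through respond(), but their payloads differ: the sockets-list pair returns session ids, while the sockets-live pair returns the sessionInfo objects captured from the socket handshake. Hedged TypeScript response shapes (fields other than userID are assumptions):

interface SessionInfo { userID?: string; [key: string]: unknown }
type SocketsListResponse          = { data: Record<string, string[]> }      // GET .../sockets-list
type SocketsListByProjectResponse = { data: string[] }                      // GET .../sockets-list/:projectKey
type SocketsLiveResponse          = { data: Record<string, SessionInfo[]> } // GET .../sockets-live
type SocketsLiveByProjectResponse = { data: SessionInfo[] }                 // GET .../sockets-live/:projectKey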
@@ -167,16 +236,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
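start() now forwards an optional prefix to createSocketIOServer, so the Socket.IO endpoint becomes <prefix>/socket instead of the hard-coded /socket. A minimal wiring sketch (entrypoint and module path are assumptions, not part of this diff):

// Hypothetical entrypoint (module paths are assumptions; not shown in this diff).
const http = require('http')
const express = require('express')
const { wsRouter, start } = require('./servers/websocket')

const app = express()
app.use(wsRouter)                     // mounts the sockets-list / sockets-live routes
const server = http.createServer(app)
start(server, '/ws')                  // Socket.IO path becomes '/ws/socket'
server.listen(9000)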
@@ -247,10 +308,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@@ -264,7 +325,7 @@ module.exports = {
});
console.log("WS server started")
setInterval((io) => {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);