Merge pull request #400 from openreplay/dev

Chore(release): v1.5.4
This commit is contained in:
Mehdi Osman 2022-03-30 17:56:15 +04:00 committed by GitHub
commit 2c77dbbe42
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
107 changed files with 4332 additions and 1530 deletions

View file

@ -102,11 +102,9 @@ def Build(a):
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
full_args, query_part= sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:

View file

@ -64,14 +64,17 @@ def get_live_sessions(project_id, filters=None):
return helper.list_to_camel_case(results)
def get_live_sessions_ws(project_id):
def get_live_sessions_ws(project_id, user_id=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
try:
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
live_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Live-Assist response")
@ -101,12 +104,12 @@ def get_live_session_by_id(project_id, session_id):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
try:
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Assist response")

View file

@ -1,7 +1,8 @@
import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@ -398,79 +399,104 @@ def get_details_chart(project_id, error_id, user_id, **data):
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
ch_sub_query = [f"{project_key} =%(project_id)s"]
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == 'mobile':
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == 'desktop':
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
@dev.timed
def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id")
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True)
pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-30)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now(1)
if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
if data.startDate is None:
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_status=data.status)
if len(statuses) == 0:
return {"data": {
'total': 0,
'errors': []
}}
error_ids = [e["error_id"] for e in statuses]
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
density = data.get("density", 7)
step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.get("sort") is not None:
sort = __get_sort_key(data["sort"])
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.get("order") is not None:
order = data["order"]
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
main_pg_query = f"""\
SELECT error_id,
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
@ -478,19 +504,23 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
last_occurrence,
first_occurrence,
chart
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
@ -500,7 +530,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
@ -508,16 +538,14 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
total = cur.rowcount
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
row = cur.fetchone()
rows = []
limit = 200
while row is not None and len(rows) < limit:
rows.append(row)
row = cur.fetchone()
if total == 0:
rows = []
else:
@ -537,15 +565,16 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@ -581,7 +610,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
error = get(error_id=error_id)
error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":

View file

@ -97,7 +97,55 @@ def __get_data_for_extend(data):
return data["data"]
def __pg_errors_query(source=None):
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
@ -120,30 +168,6 @@ def __pg_errors_query(source=None):
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
cur.mogrify(__pg_errors_query(source,
value_length=len(value) \
if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None):
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);""",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
sub_from.append(
f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)")
if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
def __generic_query(typename):
return f"""\
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5)"""
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(__generic_query(event.ui_type),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
cur.execute(
cur.mogrify(
__generic_query(event.ui_type,
value_length=len(value) \
if SUPPORTED_TYPES[event.ui_type].change_by_length \
else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
return helper.list_to_camel_case(cur.fetchall())
return f
@ -263,142 +360,96 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
query=__generic_query(typename=event_type.CLICK.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
query=__generic_query(typename=event_type.INPUT.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
query=__generic_query(typename=event_type.LOCATION.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
query=__generic_query(typename=event_type.CUSTOM.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
query=__generic_query(typename=event_type.REQUEST.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
query=__generic_query(typename=event_type.GRAPHQL.ui_type),
value_limit=3,
starts_with="/",
starts_limit=4,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
query=__generic_query(typename=event_type.STATEACTION.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None,
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None, change_by_length=True),
}
def __get_merged_queries(queries, value, project_id):
if len(queries) == 0:
return []
now = TimeUTC.now()
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}")
return results
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
FROM (SELECT project_id, type, value
FROM (SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(svalue)s
UNION
SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(value)s) AS u
WHERE Row_ID <= 5) AS sfa
ORDER BY sfa.type;""",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
query = cur.mogrify(" UNION ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
def search_pg2(text, event_type, project_id, source, key):
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
if event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)

View file

@ -1,4 +1,5 @@
import json
from typing import List
import chalicelib.utils.helper
import schemas
@ -12,12 +13,38 @@ REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
ALLOW_UPDATE_FOR = ["name", "filter"]
# def filter_stages(stages):
# ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
# events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
# events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
# events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
# return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
schemas.EventType.location, schemas.EventType.custom,
schemas.EventType.click_ios, schemas.EventType.input_ios,
schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
def __parse_events(f_events: List[dict]):
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
return [e.dict() for e in f_events]
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
if f_events is None:
return
events = []
for e in f_events:
if e.operator is None:
e.operator = schemas.SearchEventOperator._is
if not isinstance(e.value, list):
e.value = [e.value]
is_any = sessions._isAny_opreator(e.operator)
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
continue
events.append(e)
return events
def __transform_old_funnels(events):
for e in events:
@ -28,7 +55,7 @@ def __transform_old_funnels(events):
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
# filter.events = filter_stages(stages=filter.events)
filter.events = filter_stages(stages=filter.events)
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""\
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
@ -76,9 +103,12 @@ def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=Non
query
)
r = cur.fetchone()
if r is None:
return {"errors": ["funnel not found"]}
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
return {"data": r}
@ -102,9 +132,9 @@ def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
# row["filter"]["events"] = filter_stages(row["filter"]["events"])
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
if row.get("filter") is not None and row["filter"].get("events") is not None:
row["filter"]["events"] = __transform_old_funnels(row["filter"]["events"])
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
end_date=end_date)
@ -168,9 +198,10 @@ def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=No
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
# data.events = filter_stages(data.events)
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
@ -186,26 +217,37 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data):
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
@ -220,25 +262,26 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
@dev.timed
def get_issues_on_the_fly(funnel_id, user_id, project_id, data):
first_stage = data.get("firstStage")
last_stage = data.get("lastStage")
# data["events"] = filter_stages(data.get("events", []))
if len(data["events"]) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
start_date=data.get('startDate', None),
end_date=data.get('endDate', None))
data = f["filter"]
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
start_date=data.startDate,
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
if len(data.events) < 2:
return {"issues": []}
return {
"issues": helper.dict_to_camel_case(
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
last_stage=last_stage))}
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
last_stage=len(data.events)))}
def get(funnel_id, project_id, user_id, flatten=True):
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@ -260,7 +303,11 @@ def get(funnel_id, project_id, user_id, flatten=True):
if f.get("filter") is not None and f["filter"].get("events") is not None:
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
# f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
f["filter"]["events"] = __parse_events(f["filter"]["events"])
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
if fix_stages:
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
if flatten:
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
return f
@ -270,7 +317,7 @@ def get(funnel_id, project_id, user_id, flatten=True):
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date
@ -279,7 +326,7 @@ def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.Funn
end_date=data.endDate)
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data.dict()) \
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
.get("issues", {})
issues = issues.get("significant", []) + issues.get("insignificant", [])
issue = None

View file

@ -13,7 +13,8 @@ def get_by_session_id(session_id):
header_size,
encoded_body_size,
decoded_body_size,
success
success,
COALESCE(status, CASE WHEN success THEN 200 END) AS status
FROM events.resources
WHERE session_id = %(session_id)s;"""
params = {"session_id": session_id}

View file

@ -168,10 +168,11 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False,
error_status="ALL", count_only=False, issue=None):
full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id,
user_id)
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
@ -198,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = "DESC"
else:
data.order = data.order.upper()
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
else:
sort = 'start_ts'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(users_sessions)
@ -206,51 +218,58 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
count(full_sessions) AS user_sessions_count,
jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
MIN(full_sessions.start_ts) AS first_session_ts,
ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
GROUP BY user_id
ORDER BY user_sessions_count DESC) AS users_sessions;""",
ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
) AS filtred_sessions
) AS full_sessions
GROUP BY user_id
) AS users_sessions;""",
full_args)
else:
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""",
full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
cur.execute(main_query)
try:
cur.execute(main_query)
except Exception as err:
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
print(main_query)
print("--------- PAYLOAD -----------")
print(data.dict())
print("--------------------")
raise err
if errors_only:
return helper.list_to_camel_case(cur.fetchall())
if count_only:
return helper.dict_to_camel_case(cur.fetchone())
sessions = cur.fetchone()
if count_only:
return helper.dict_to_camel_case(sessions)
total = sessions["count"]
sessions = sessions["sessions"]
# sessions = []
# total = cur.rowcount
# row = cur.fetchone()
# limit = 200
# while row is not None and len(sessions) < limit:
# if row.get("favorite"):
# limit += 1
# sessions.append(row)
# row = cur.fetchone()
if errors_only:
return sessions
if data.group_by_user:
for i, s in enumerate(sessions):
sessions[i] = {**s.pop("last_session")[0], **s}
@ -281,9 +300,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
@ -366,6 +385,19 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
return sessions
# Predicate factored out of the inline stage-validation that was previously
# duplicated inside search_query_parts (see the identical condition removed
# further below). An event is "valid" unless one of the rejection clauses
# matches. NOTE: `and` binds tighter than `or`, so this is a disjunction of
# three rejection cases, all wrapped in a single `not (...)`.
def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
# Rejected when:
#   (a) operator is not an "any" operator, the value list is empty, and the
#       event type is not request/graphql details (those carry no value);
#   (b) a performance/location event type has no `source`;
#   (c) a request/graphql details event has no `filters`.
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
schemas.EventType.graphql_details] \
or event.type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
] and (event.source is None or len(event.source) == 0) \
or event.type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and (
event.filters is None or len(event.filters) == 0))
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
@ -375,11 +407,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
"s.duration IS NOT NULL"
]
extra_from = ""
fav_only_join = ""
if favorite_only and not errors_only:
fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
extra_constraints.append("fs.user_id = %(userId)s")
full_args["userId"] = user_id
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@ -586,6 +613,13 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
valid_events_count += 1
events_query_from = []
event_index = 0
or_events = data.events_order == schemas.SearchEventOrder._or
@ -596,16 +630,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not is_any and len(event.value) == 0 and event_type not in [schemas.EventType.request_details,
schemas.EventType.graphql_details] \
or event_type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
] and (event.source is None or len(event.source) == 0) \
or event_type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and (
event.filters is None or len(event.filters) == 0):
if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
is_not = False
@ -617,6 +642,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
"main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s",
"ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"]
if favorite_only and not errors_only:
event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)"
event_where.append("fs.user_id = %(userId)s")
else:
event_from = "%s"
event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s",
@ -921,7 +949,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
""")
else:
events_query_from.append(f"""\
(SELECT main.session_id, MIN(main.timestamp) AS timestamp
(SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp
FROM {event_from}
WHERE {" AND ".join(event_where)}
GROUP BY 1
@ -935,16 +963,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
MIN(timestamp) AS first_event_ts,
MAX(timestamp) AS last_event_ts
FROM ({events_joiner.join(events_query_from)}) AS u
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
events_query_part = f"""SELECT
event_0.session_id,
MIN(event_0.timestamp) AS first_event_ts,
MAX(event_{event_index - 1}.timestamp) AS last_event_ts
FROM {events_joiner.join(events_query_from)}
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
data.events = []
# ---------------------------------------------------------------------------
@ -958,24 +984,24 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# elif data.platform == schemas.PlatformType.desktop:
# extra_constraints.append(
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
if error_status != "ALL":
extra_constraints.append("ser.project_id = %(project_id)s")
if error_status != schemas.ErrorStatus.all:
extra_constraints.append("ser.status = %(error_status)s")
full_args["status"] = error_status.lower()
full_args["error_status"] = error_status
if favorite_only:
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(user_id)s")
extra_constraints.append("ufe.user_id = %(userId)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
if not favorite_only and not errors_only and user_id is not None:
if favorite_only and not errors_only and user_id is not None:
extra_from += """INNER JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)"""
elif not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
@ -1003,7 +1029,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
{extra_from}
WHERE
{" AND ".join(extra_constraints)}"""
return full_args, query_part, sort
return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
@ -1102,48 +1128,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date):
return helper.list_to_camel_case(rows)
def get_favorite_sessions(project_id, user_id, include_viewed=False):
with pg_client.PostgresClient() as cur:
query_part = cur.mogrify(f"""\
FROM public.sessions AS s
LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
WHERE fs.user_id = %(userId)s""",
{"projectId": project_id, "userId": user_id}
)
extra_query = b""
if include_viewed:
extra_query = cur.mogrify(""",\
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s), FALSE) AS viewed""",
{"projectId": project_id, "userId": user_id})
cur.execute(f"""\
SELECT s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_os,
s.user_browser,
s.user_device,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
TRUE AS favorite
{extra_query.decode('UTF-8')}
{query_part.decode('UTF-8')}
ORDER BY s.session_id
LIMIT 50;""")
sessions = cur.fetchall()
return helper.list_to_camel_case(sessions)
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]
@ -1196,11 +1180,11 @@ def get_session_user(project_id, user_id):
"public".sessions
WHERE
project_id = %(project_id)s
AND user_id = %(user_id)s
AND user_id = %(userId)s
AND duration is not null
GROUP BY user_id;
""",
{"project_id": project_id, "user_id": user_id}
{"project_id": project_id, "userId": user_id}
)
cur.execute(query=query)
data = cur.fetchone()
@ -1213,8 +1197,8 @@ def get_session_ids_by_user_ids(project_id, user_ids):
"""\
SELECT session_id FROM public.sessions
WHERE
project_id = %(project_id)s AND user_id IN %(user_id)s;""",
{"project_id": project_id, "user_id": tuple(user_ids)}
project_id = %(project_id)s AND user_id IN %(userId)s;""",
{"project_id": project_id, "userId": tuple(user_ids)}
)
ids = cur.execute(query=query)
return ids
@ -1240,8 +1224,8 @@ def delete_sessions_by_user_ids(project_id, user_ids):
"""\
DELETE FROM public.sessions
WHERE
project_id = %(project_id)s AND user_id IN %(user_id)s;""",
{"project_id": project_id, "user_id": tuple(user_ids)}
project_id = %(project_id)s AND user_id IN %(userId)s;""",
{"project_id": project_id, "userId": tuple(user_ids)}
)
cur.execute(query=query)

View file

@ -80,32 +80,41 @@ def get_top_key_values(project_id):
return helper.dict_to_CAPITAL_keys(row)
def __generic_query(typename):
return f"""\
SELECT value, type
FROM ((SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f""" (SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION
(SELECT value, type
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)) AS met"""
LIMIT 5);"""
return f""" SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(__generic_query(typename),
query = cur.mogrify(__generic_query(typename,
value_length=len(text) \
if SUPPORTED_TYPES[typename].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
@ -120,124 +129,73 @@ SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
value_limit=5,
starts_with="/",
starts_limit=5,
ignore_if_starts_with=[]),
change_by_length=True),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
}
@ -247,6 +205,7 @@ def search(text, meta_type, project_id):
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
# for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}

View file

@ -118,12 +118,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
i = -1
for s in stages:
for i, s in enumerate(stages):
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
if s.get("operator") is None:
s["operator"] = "is"
@ -132,6 +129,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
is_any = sessions._isAny_opreator(s["operator"])
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
continue
i += 1
if i == 0:
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
@ -213,7 +215,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
) AS issues_t
@ -526,7 +528,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
split = issue.split('__^__')
issues_dict['significant' if is_sign else 'insignificant'].append({
"type": split[0],
"title": get_issue_title(split[0]),
"title": helper.get_issue_title(split[0]),
"affected_sessions": affected_sessions[issue],
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
"lost_conversions": lost_conversions,
@ -639,27 +641,3 @@ def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
output['stages'] = stages_list
output['criticalIssuesCount'] = n_critical_issues
return output
def get_issue_title(issue_type):
    """Map an internal issue-type identifier to its human-readable title.

    Identifiers without a known mapping are returned unchanged, so the
    function is safe to call with any string.
    """
    titles = {
        'click_rage': "Click Rage",
        'dead_click': "Dead Click",
        'excessive_scrolling': "Excessive Scrolling",
        'bad_request': "Bad Request",
        'missing_resource': "Missing Image",
        'memory': "High Memory Usage",
        'cpu': "High CPU",
        'slow_resource': "Slow Resource",
        'slow_page_load': "Slow Page Performance",
        'crash': "Crash",
        'ml_cpu': "High CPU",
        'ml_memory': "High Memory Usage",
        'ml_dead_click': "Dead Click",
        'ml_click_rage': "Click Rage",
        'ml_mouse_thrashing': "Mouse Thrashing",
        'ml_excessive_scrolling': "Excessive Scrolling",
        'ml_slow_resources': "Slow Resource",
        'custom': "Custom Event",
        'js_exception': "Error",
        'custom_event_error': "Custom Error",
        'js_error': "Error",
    }
    # Fall back to the raw identifier for unknown issue types.
    return titles.get(issue_type, issue_type)

View file

@ -6,10 +6,7 @@ class Event:
class SupportedFilter:
def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with):
def __init__(self, get, query, change_by_length):
self.get = get
self.query = query
self.valueLimit = value_limit
self.startsWith = starts_with
self.startsLimit = starts_limit
self.ignoreIfStartsWith = ignore_if_starts_with
self.change_by_length = change_by_length

View file

@ -213,11 +213,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
if value is None:
return value
if op == schemas.SearchEventOperator._starts_with:
return value + '%'
return f"{value}%"
elif op == schemas.SearchEventOperator._ends_with:
return '%' + value
return f"%{value}"
elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains:
return '%' + value + '%'
return f"%{value}%"
return value

View file

@ -5,11 +5,12 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool
PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
_PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
@ -63,7 +64,7 @@ class PostgresClient:
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
self.connection = psycopg2.connect(**PG_CONFIG)
self.connection = psycopg2.connect(**_PG_CONFIG)
else:
self.connection = postgreSQL_pool.getconn()

View file

@ -21,13 +21,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/sessions2/favorite', tags=["sessions"])
def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True)
}
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
@ -126,14 +119,14 @@ def events_search(projectId: int, q: str,
else:
return {"data": []}
result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key)
result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key)
return result
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search2_pg(data, projectId, user_id=context.user_id)
data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext =
return {'data': sessions_metas.get_top_key_values(projectId)}
@app.get('/{projectId}/sessions/filters/search', tags=["sessions"])
def get_session_filters_meta(projectId: int, q: str, type: str,
context: schemas.CurrentContext = Depends(OR_context)):
meta_type = type
if len(meta_type) == 0:
return {"data": []}
if len(q) == 0:
return {"data": []}
return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
@ -716,7 +698,7 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())
data=data)
@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@ -731,7 +713,7 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,
data=data.dict())}
data=data)}
@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@ -755,10 +737,11 @@ def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.
def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None,
context: schemas.CurrentContext = Depends(OR_context)):
issue = issues.get(project_id=projectId, issue_id=issueId)
if issue is None:
return {"errors": ["issue not found"]}
return {
"data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue,
start_date=startDate,
end_date=endDate),
start_date=startDate, end_date=endDate),
"issue": issue}}
@ -837,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, user_id=userId)
return {'data': data}
@ -901,13 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False,
data: schemas.SearchErrorsSchema = Body(...),
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(favorite, str):
favorite = True if len(favorite) == 0 else False
return errors.search(data.dict(), projectId, user_id=context.user_id, status=status,
favorite_only=favorite)
return errors.search(data, projectId, user_id=context.user_id)
@app.get('/{projectId}/errors/stats', tags=['errors'])

View file

@ -11,6 +11,10 @@ def attribute_to_camel_case(snake_str):
return components[0] + ''.join(x.title() for x in components[1:])
def transform_email(email: str) -> str:
return email.lower() if isinstance(email, str) else email
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@ -18,6 +22,7 @@ class _Grecaptcha(BaseModel):
class UserLoginSchema(_Grecaptcha):
email: EmailStr = Field(...)
password: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class UserSignupSchema(UserLoginSchema):
@ -31,17 +36,21 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
email: Optional[str] = Field(None)
email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserPasswordSchema(BaseModel):
@ -70,7 +79,9 @@ class CurrentAPIContext(BaseModel):
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
@ -83,15 +94,6 @@ class EditSlackSchema(BaseModel):
url: HttpUrl = Field(...)
class SearchErrorsSchema(BaseModel):
platform: Optional[str] = Field(None)
startDate: Optional[int] = Field(TimeUTC.now(-7))
endDate: Optional[int] = Field(TimeUTC.now())
density: Optional[int] = Field(7)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CreateNotificationSchema(BaseModel):
token: str = Field(...)
notifications: List = Field(...)
@ -124,15 +126,19 @@ class CreateEditWebhookSchema(BaseModel):
class CreateMemberSchema(BaseModel):
userId: Optional[int] = Field(None)
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditMemberSchema(BaseModel):
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
@ -253,6 +259,8 @@ class EmailPayloadSchema(BaseModel):
link: str = Field(...)
message: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
@ -261,6 +269,8 @@ class MemberInvitationPayloadSchema(BaseModel):
client_id: str = Field(...)
sender_name: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class Config:
alias_generator = attribute_to_camel_case
@ -609,11 +619,12 @@ class SessionsSearchPayloadSchema(BaseModel):
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
order: str = Field(default="DESC")
order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
bookmarked: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
@ -662,6 +673,7 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
@root_validator(pre=True)
def enforce_default_values(cls, values):
@ -694,6 +706,27 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
order: Optional[str] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
group_by_user: Optional[bool] = Field(default=False, const=True)
rangeValue: Optional[str] = Field(None)
class ErrorStatus(str, Enum):
all = 'all'
unresolved = 'unresolved'
resolved = 'resolved'
ignored = 'ignored'
class ErrorSort(str, Enum):
occurrence = 'occurrence'
users_count = 'users'
sessions_count = 'sessions'
class SearchErrorsSchema(SessionsSearchPayloadSchema):
sort: ErrorSort = Field(default=ErrorSort.occurrence)
density: Optional[int] = Field(7)
status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)
query: Optional[str] = Field(default=None)
class MetricPayloadSchema(BaseModel):

View file

@ -9,16 +9,17 @@ import (
func getSessionKey(sessionID uint64) string {
// Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID
return strconv.FormatUint(sessionID>>50, 10)
return strconv.FormatUint(sessionID>>50, 10)
}
func ResolveURL(baseurl string, rawurl string) string {
rawurl = strings.Trim(rawurl, " ")
if !isRelativeCachable(rawurl) {
return rawurl
}
base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
return rawurl
}
return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl)
@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string {
}
func GetCachePathForAssets(sessionID uint64, rawurl string) string {
return getCachePathWithKey(sessionID, rawurl)
return getCachePathWithKey(sessionID, rawurl)
}
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string {
fullURL, cachable := GetFullCachableURL(baseURL, relativeURL)
if !cachable {
return fullURL
}
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
}
return u.String()
}

View file

@ -1,8 +1,9 @@
import json
import schemas
from chalicelib.core import dashboard
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import ch_client
from chalicelib.utils import ch_client, metrics_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -265,7 +266,7 @@ def get_details(project_id, error_id, user_id, **data):
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE pe.error_id = fe.error_id
AND fe.user_id = %(user_id)s), FALSE) AS favorite,
AND fe.user_id = %(userId)s), FALSE) AS favorite,
True AS viewed
FROM public.errors AS pe
INNER JOIN events.errors AS ee USING (error_id)
@ -274,7 +275,7 @@ def get_details(project_id, error_id, user_id, **data):
AND error_id = %(error_id)s
ORDER BY start_ts DESC
LIMIT 1;""",
{"project_id": project_id, "error_id": error_id, "user_id": user_id})
{"project_id": project_id, "error_id": error_id, "userId": user_id})
cur.execute(query=query)
status = cur.fetchone()
@ -423,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if time_constraint:
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
if platform == 'mobile':
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == 'desktop':
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@ -437,60 +438,280 @@ def __get_step_size(startTimestamp, endTimestamp, density):
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
ch_sub_query = __get_basic_constraints(data.get('platform'))
ch_sub_query.append("source ='js_exception'")
def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-30)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now(1)
if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
if data.startDate is None:
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_ids = [e["error_id"] for e in statuses]
error_status=data.status)
if len(statuses) == 0:
return {"data": {
'total': 0,
'errors': []
}}
with ch_client.ClickHouseClient() as ch:
if data.get("startDate") is None:
data["startDate"] = TimeUTC.now(-7)
if data.get("endDate") is None:
data["endDate"] = TimeUTC.now()
density = data.get("density", 7)
step_size = __get_step_size(data["startDate"], data["endDate"], density)
return empty_response
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1)
sort = __get_sort_key('datetime')
if data.get("sort") is not None:
sort = __get_sort_key(data["sort"])
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.get("order") is not None:
order = data["order"]
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data['startDate'],
"endDate": data['endDate'],
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
users,
sessions,
last_occurrence,
first_occurrence,
chart
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
if total == 0:
rows = []
else:
if len(statuses) == 0:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
else:
r["status"] = "untracked"
r["parent_error_id"] = None
r["favorite"] = False
r["viewed"] = False
r["stack"] = None
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
or len(r["stack"]) > 0
and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
offset -= len(rows)
return {
"data": {
'total': total - offset,
'errors': helper.list_to_camel_case(rows)
}
}
# refactor this function after clickhouse structure changes (missing search by query)
def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
platform = f.value[0]
ch_sub_query = __get_basic_constraints(platform)
ch_sub_query.append("source ='js_exception'")
statuses = []
error_ids = None
# Clickhouse keeps data for the past month only, so no need to search beyond that
if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
if data.endDate is None:
data.endDate = TimeUTC.now()
step_size = __get_step_size(data.startDate, data.endDate, data.density)
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = "DESC"
if data.order is not None:
order = data.order
params = {
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if data.bookmarked:
cur.execute(cur.mogrify(f"""SELECT error_id
FROM public.user_favorite_errors
WHERE user_id = %(userId)s
{"" if error_ids is None else "AND error_id IN %(error_ids)s"}""",
{"userId": user_id, "error_ids": tuple(error_ids or [])}))
error_ids = cur.fetchall()
if len(error_ids) == 0:
return empty_response
error_ids = [e["error_id"] for e in error_ids]
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
ch_sub_query.append("error_id IN %(error_ids)s")
main_ch_query = f"""\
SELECT COUNT(DISTINCT error_id) AS count
FROM errors
WHERE {" AND ".join(ch_sub_query)};"""
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
total = ch.execute(query=main_ch_query, params=params)[0]["count"]
if flows:
return {"data": {"count": total}}
@ -510,9 +731,10 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
@ -523,35 +745,36 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
# print("--------------------")
# print(main_ch_query % params)
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
rows = ch.execute(query=main_ch_query, params=params)
if len(statuses) == 0:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
query = cur.mogrify(
"""SELECT error_id, status, parent_error_id, payload,
COALESCE((SELECT TRUE
FROM public.user_favorite_errors AS fe
WHERE errors.error_id = fe.error_id
AND fe.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE errors.error_id = ve.error_id
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"userId": user_id})
cur.execute(query=query)
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
@ -565,9 +788,9 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
r["chart"] = list(r["chart"])
for i in range(len(r["chart"])):
r["chart"][i] = {"timestamp": r["chart"][i][0], "count": r["chart"][i][1]}
r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data["startDate"],
end_time=data["endDate"],
density=density, neutral={"count": 0})
r["chart"] = dashboard.__complete_missing_steps(rows=r["chart"], start_time=data.startDate,
end_time=data.endDate,
density=data.density, neutral={"count": 0})
offset = len(rows)
rows = [r for r in rows if r["stack"] is None
or (len(r["stack"]) == 0 or len(r["stack"]) > 1
@ -593,7 +816,7 @@ def __save_stacktrace(error_id, data):
def get_trace(project_id, error_id):
error = get(error_id=error_id)
error = get(error_id=error_id, family=False)
if error is None:
return {"errors": ["error not found"]}
if error.get("source", "") != "js_exception":
@ -766,7 +989,7 @@ def format_first_stack_frame(error):
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
"""WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s)
SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
FROM (SELECT root_error.error_id
FROM events.errors
@ -780,7 +1003,7 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim
AND user_viewed.error_id ISNULL
LIMIT 1
) AS timed_errors;""",
{"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
{"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp})
cur.execute(query=query)
row = cur.fetchone()

View file

@ -7,7 +7,7 @@ def get_by_session_id(session_id):
with ch_client.ClickHouseClient() as ch:
ch_query = """\
SELECT
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s);"""
params = {"session_id": session_id}

View file

@ -41,6 +41,11 @@ def login(data: schemas.UserLoginSchema = Body(...)):
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
if "errors" in r:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
tenant_id = r.pop("tenantId")

View file

@ -9,9 +9,9 @@ import (
"github.com/pkg/errors"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/queue/types"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
)
type Message = kafka.Message
@ -19,7 +19,7 @@ type Message = kafka.Message
type Consumer struct {
c *kafka.Consumer
messageHandler types.MessageHandler
commitTicker *time.Ticker
commitTicker *time.Ticker
pollTimeout uint
lastKafkaEventTs int64
@ -56,7 +56,7 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand
return &Consumer{
c: c,
messageHandler: messageHandler,
commitTicker: time.NewTicker(2 * time.Minute),
commitTicker: time.NewTicker(2 * time.Minute),
pollTimeout: 200,
}
}
@ -65,13 +65,12 @@ func (consumer *Consumer) DisableAutoCommit() {
consumer.commitTicker.Stop()
}
func (consumer *Consumer) Commit() error {
consumer.c.Commit() // TODO: return error if it is not "No offset stored"
return nil
}
func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
assigned, err := consumer.c.Assignment()
if err != nil {
return err
@ -84,37 +83,38 @@ func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
timestamps = append(timestamps, p)
}
offsets, err := consumer.c.OffsetsForTimes(timestamps, 2000)
if err != nil {
if err != nil {
return errors.Wrap(err, "Kafka Consumer back commit error")
}
// Limiting to already committed
committed, err := consumer.c.Committed(assigned, 2000) // memorise?
logPartitions("Actually committed:",committed)
logPartitions("Actually committed:", committed)
if err != nil {
return errors.Wrap(err, "Kafka Consumer retrieving committed error")
}
for _, offs := range offsets {
for _, comm := range committed {
if comm.Offset == kafka.OffsetStored ||
if comm.Offset == kafka.OffsetStored ||
comm.Offset == kafka.OffsetInvalid ||
comm.Offset == kafka.OffsetBeginning ||
comm.Offset == kafka.OffsetEnd { continue }
if comm.Partition == offs.Partition &&
comm.Offset == kafka.OffsetBeginning ||
comm.Offset == kafka.OffsetEnd {
continue
}
if comm.Partition == offs.Partition &&
(comm.Topic != nil && offs.Topic != nil && *comm.Topic == *offs.Topic) &&
comm.Offset > offs.Offset {
comm.Offset > offs.Offset {
offs.Offset = comm.Offset
}
}
}
// TODO: check per-partition errors: offsets[i].Error
// TODO: check per-partition errors: offsets[i].Error
_, err = consumer.c.CommitOffsets(offsets)
return errors.Wrap(err, "Kafka Consumer back commit error")
}
func (consumer *Consumer) CommitBack(gap int64) error {
func (consumer *Consumer) CommitBack(gap int64) error {
if consumer.lastKafkaEventTs == 0 {
return nil
}
@ -135,31 +135,31 @@ func (consumer *Consumer) ConsumeNext() error {
}
switch e := ev.(type) {
case *kafka.Message:
if e.TopicPartition.Error != nil {
return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
}
ts := e.Timestamp.UnixNano()/ 1e6
consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
Topic: *(e.TopicPartition.Topic),
ID: uint64(e.TopicPartition.Offset),
Timestamp: ts,
})
consumer.lastKafkaEventTs = ts
// case kafka.AssignedPartitions:
// logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
// consumer.partitions = e.Partitions
// consumer.c.Assign(e.Partitions)
// log.Printf("Actually partitions assigned!")
// case kafka.RevokedPartitions:
// log.Println("Kafka Cosumer: Partitions Revoked")
// consumer.partitions = nil
// consumer.c.Unassign()
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown {
os.Exit(1)
}
log.Printf("Consumer error: %v\n", e)
case *kafka.Message:
if e.TopicPartition.Error != nil {
return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
}
ts := e.Timestamp.UnixNano() / 1e6
consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
Topic: *(e.TopicPartition.Topic),
ID: uint64(e.TopicPartition.Offset),
Timestamp: ts,
})
consumer.lastKafkaEventTs = ts
// case kafka.AssignedPartitions:
// logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
// consumer.partitions = e.Partitions
// consumer.c.Assign(e.Partitions)
// log.Printf("Actually partitions assigned!")
// case kafka.RevokedPartitions:
// log.Println("Kafka Cosumer: Partitions Revoked")
// consumer.partitions = nil
// consumer.c.Unassign()
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded {
os.Exit(1)
}
log.Printf("Consumer error: %v\n", e)
}
return nil
}
@ -173,8 +173,6 @@ func (consumer *Consumer) Close() {
}
}
// func (consumer *Consumer) consume(
// message func(m *kafka.Message) error,
// commit func(c *kafka.Consumer) error,
@ -230,7 +228,6 @@ func (consumer *Consumer) Close() {
// }
// }
// func (consumer *Consumer) Consume(
// message func(key uint64, value []byte) error,
// ) error {

View file

@ -0,0 +1,91 @@
\set ON_ERROR_STOP true
SET client_min_messages TO NOTICE;
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
-- to detect duplicate users and delete them if possible
DO
$$
DECLARE
duplicate RECORD;
BEGIN
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'duplicate users detected';
FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
DELETE FROM users WHERE user_id = duplicate.user_id;
END IF;
END LOOP;
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'remaining duplicates, please fix (delete) before finishing update';
FOR duplicate IN SELECT user_id, email
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
raise notice 'user: % %',duplicate.user_id,duplicate.email;
END LOOP;
RAISE 'Duplicate users' USING ERRCODE = '42710';
END IF;
END IF;
END;
$$
LANGUAGE plpgsql;
UPDATE users
SET email=LOWER(email);
DROP INDEX IF EXISTS autocomplete_value_gin_idx;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3-ee'
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -721,7 +721,22 @@ $$
CREATE unique index IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
BEGIN
IF NOT EXISTS(SELECT *
@ -1018,7 +1033,7 @@ $$
CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE TABLE IF NOT EXISTS events.state_actions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,

View file

@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers
var express = require('express');
const {ExpressPeerServer} = require('peer');
var socket;
if (process.env.cluster === "true") {
if (process.env.redis === "true") {
console.log("Using Redis");
socket = require("./servers/websocket-cluster");
} else {

View file

@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader;
const {extractPeerId} = require('./peerjs-server');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
var wsRouter = express.Router();
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
let io;
const debug = process.env.debug === "1" || false;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
}
const uniqueSessions = function (data) {
let resArr = [];
@ -36,18 +58,40 @@ const uniqueSessions = function (data) {
return resArr;
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
let result = {"data": liveSessions};
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
let rooms = await io.of('/').adapter.allRooms();
return rooms;
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@ -56,37 +100,64 @@ const socketsList = async function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
@ -94,51 +165,48 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]);
}
}
let result = {"data": liveSessions};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]);
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@ -219,35 +287,13 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
socket.identity = socket.handshake.query.identity;
let {projectKey, sessionId} = extractPeerId(socket.peerId);
const {projectKey, sessionId} = extractPeerId(socket.peerId);
socket.sessionId = sessionId;
socket.projectKey = projectKey;
socket.lastMessageReceivedAt = Date.now();

View file

@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
var wsRouter = express.Router();
const {extractPeerId} = require('./peerjs-server');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
let debug = process.env.debug === "1" || false;
const debug = process.env.debug === "1" || false;
const socketsList = function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
}
const createSocketIOServer = function (server, prefix) {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
let result = {"data": liveSessions};
}
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
return undefined;
}
const extractProjectKeyFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getParameter(0)) {
debug && console.log(`[WS]where projectKey=${req.getParameter(0)}`);
return req.getParameter(0);
}
} else if (req.params.projectKey) {
debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
return req.params.projectKey;
}
return undefined;
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@ -36,84 +82,111 @@ const socketsList = function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
let result = {"data": liveSessions};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@ -192,29 +265,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
@ -285,10 +337,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@ -302,7 +354,7 @@ module.exports = {
});
console.log("WS server started")
setInterval((io) => {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);

1
frontend/.gitignore vendored
View file

@ -8,3 +8,4 @@ app/components/ui/SVG.js
*.DS_Store
.env
*css.d.ts
*.cache

View file

@ -13,7 +13,8 @@ import withLocationHandlers from "HOCs/withLocationHandlers";
import { fetch as fetchFilterVariables } from 'Duck/sources';
import { fetchSources } from 'Duck/customField';
import { RehydrateSlidePanel } from './WatchDogs/components';
import { setActiveTab, setFunnelPage } from 'Duck/sessions';
import { setFunnelPage } from 'Duck/sessions';
import { setActiveTab } from 'Duck/search';
import SessionsMenu from './SessionsMenu/SessionsMenu';
import { LAST_7_DAYS } from 'Types/app/period';
import { resetFunnel } from 'Duck/funnels';
@ -51,12 +52,12 @@ const allowedQueryKeys = [
variables: state.getIn([ 'customFields', 'list' ]),
sources: state.getIn([ 'customFields', 'sources' ]),
filterValues: state.get('filterValues'),
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
favoriteList: state.getIn([ 'sessions', 'favoriteList' ]),
currentProjectId: state.getIn([ 'user', 'siteId' ]),
sites: state.getIn([ 'site', 'list' ]),
watchdogs: state.getIn(['watchdogs', 'list']),
activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
sessions: state.getIn([ 'sessions', 'list' ]),
}), {
fetchFavoriteSessionList,
applyFilter,
@ -91,7 +92,9 @@ export default class BugFinder extends React.PureComponent {
// keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
// };
// });
props.fetchSessions();
if (props.sessions.size === 0) {
props.fetchSessions();
}
props.resetFunnel();
props.resetFunnelFilters();
props.fetchFunnelsList(LAST_7_DAYS)
@ -115,7 +118,6 @@ export default class BugFinder extends React.PureComponent {
}
render() {
const { activeFlow, activeTab } = this.props;
const { showRehydratePanel } = this.state;
return (

View file

@ -1,7 +1,7 @@
import { connect } from 'react-redux';
import { Loader, NoContent, Button, LoadMoreButton } from 'UI';
import { Loader, NoContent, Button, LoadMoreButton, Pagination } from 'UI';
import { applyFilter, addAttribute, addEvent } from 'Duck/filters';
import { fetchSessions, addFilterByKeyAndValue } from 'Duck/search';
import { fetchSessions, addFilterByKeyAndValue, updateCurrentPage } from 'Duck/search';
import SessionItem from 'Shared/SessionItem';
import SessionListHeader from './SessionListHeader';
import { FilterKey } from 'Types/filter/filterType';
@ -15,17 +15,20 @@ var timeoutId;
shouldAutorefresh: state.getIn([ 'filters', 'appliedFilter', 'events' ]).size === 0,
savedFilters: state.getIn([ 'filters', 'list' ]),
loading: state.getIn([ 'sessions', 'loading' ]),
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
allList: state.getIn([ 'sessions', 'list' ]),
total: state.getIn([ 'sessions', 'total' ]),
filters: state.getIn([ 'search', 'instance', 'filters' ]),
metaList: state.getIn(['customFields', 'list']).map(i => i.key),
currentPage: state.getIn([ 'search', 'currentPage' ]),
lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]),
}), {
applyFilter,
addAttribute,
addEvent,
fetchSessions,
addFilterByKeyAndValue,
updateCurrentPage,
})
export default class SessionList extends React.PureComponent {
state = {
@ -76,6 +79,8 @@ export default class SessionList extends React.PureComponent {
clearTimeout(timeoutId)
}
renderActiveTabContent(list) {
const {
loading,
@ -84,6 +89,9 @@ export default class SessionList extends React.PureComponent {
allList,
activeTab,
metaList,
currentPage,
total,
lastPlayedSessionId,
} = this.props;
const _filterKeys = filters.map(i => i.key);
const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID);
@ -93,49 +101,47 @@ export default class SessionList extends React.PureComponent {
return (
<NoContent
title={this.getNoContentMessage(activeTab)}
subtext="Please try changing your search parameters."
// subtext="Please try changing your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
subtext={
<div>
<div>Please try changing your search parameters.</div>
{allList.size > 0 && (
<div className="pt-2">
However, we found other sessions based on your search parameters.
<div>
<Button
plain
onClick={() => onMenuItemClick({ name: 'All', type: 'all' })}
>See All</Button>
<div>
<div>Please try changing your search parameters.</div>
{allList.size > 0 && (
<div className="pt-2">
However, we found other sessions based on your search parameters.
<div>
<Button
plain
onClick={() => onMenuItemClick({ name: 'All', type: 'all' })}
>See All</Button>
</div>
</div>
</div>
)}
</div>
)}
</div>
}
>
<Loader loading={ loading }>
{ list.take(displayedCount).map(session => (
{ list.map(session => (
<SessionItem
key={ session.sessionId }
session={ session }
hasUserFilter={hasUserFilter}
onUserClick={this.onUserClick}
metaList={metaList}
lastPlayedSessionId={lastPlayedSessionId}
/>
))}
</Loader>
<LoadMoreButton
className="mt-12 mb-12"
displayedCount={displayedCount}
totalCount={list.size}
loading={loading}
onClick={this.addPage}
description={ displayedCount === list.size &&
<div className="color-gray-medium text-sm text-center my-3">
Haven't found the session in the above list? <br/>Try being a bit more specific by setting a specific time frame or simply use different filters
</div>
}
/>
<div className="w-full flex items-center justify-center py-6">
<Pagination
page={currentPage}
totalPages={Math.ceil(total / PER_PAGE)}
onPageChange={(page) => this.props.updateCurrentPage(page)}
limit={PER_PAGE}
debounceRequest={1000}
/>
</div>
</NoContent>
);
}

View file

@ -64,5 +64,5 @@ function SessionListHeader({
};
export default connect(state => ({
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
}), { applyFilter })(SessionListHeader);

View file

@ -1,30 +1,20 @@
import React, { useEffect } from 'react'
import React from 'react'
import { connect } from 'react-redux';
import cn from 'classnames';
import { SideMenuitem, SavedSearchList, Progress, Popup, Icon, CircularLoader } from 'UI'
import { SideMenuitem, SavedSearchList, Progress, Popup } from 'UI'
import stl from './sessionMenu.css';
import { fetchWatchdogStatus } from 'Duck/watchdogs';
import { setActiveFlow, clearEvents } from 'Duck/filters';
import { setActiveTab } from 'Duck/sessions';
import { clearEvents } from 'Duck/filters';
import { issues_types } from 'Types/session/issue'
import { fetchList as fetchSessionList } from 'Duck/sessions';
function SessionsMenu(props) {
const {
activeFlow, activeTab, watchdogs = [], keyMap, wdTypeCount,
fetchWatchdogStatus, toggleRehydratePanel, filters, sessionsLoading } = props;
const { activeTab, keyMap, wdTypeCount, toggleRehydratePanel } = props;
const onMenuItemClick = (filter) => {
props.onMenuItemClick(filter)
if (activeFlow && activeFlow.type === 'flows') {
props.setActiveFlow(null)
}
}
// useEffect(() => {
// fetchWatchdogStatus()
// }, [])
const capturingAll = props.captureRate && props.captureRate.get('captureAll');
@ -66,36 +56,13 @@ function SessionsMenu(props) {
{ issues_types.filter(item => item.visible).map(item => (
<SideMenuitem
key={item.key}
disabled={!keyMap[item.type] && !wdTypeCount[item.type]}
// disabled={!keyMap[item.type] && !wdTypeCount[item.type]}
active={activeTab.type === item.type}
title={item.name} iconName={item.icon}
onClick={() => onMenuItemClick(item)}
/>
))}
{/* <div className={stl.divider} />
<div className="my-3">
<SideMenuitem
title={
<div className="flex items-center">
<div>Assist</div>
{ activeTab.type === 'live' && (
<div
className="ml-4 h-5 w-6 flex items-center justify-center"
onClick={() => !sessionsLoading && props.fetchSessionList(filters.toJS())}
>
{ sessionsLoading ? <CircularLoader className="ml-1" /> : <Icon name="sync-alt" size="14" />}
</div>
)}
</div>
}
iconName="person"
active={activeTab.type === 'live'}
onClick={() => onMenuItemClick({ name: 'Assist', type: 'live' })}
/>
</div> */}
<div className={stl.divider} />
<div className="my-3">
<SideMenuitem
@ -113,13 +80,12 @@ function SessionsMenu(props) {
}
export default connect(state => ({
activeTab: state.getIn([ 'sessions', 'activeTab' ]),
activeTab: state.getIn([ 'search', 'activeTab' ]),
keyMap: state.getIn([ 'sessions', 'keyMap' ]),
wdTypeCount: state.getIn([ 'sessions', 'wdTypeCount' ]),
activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
captureRate: state.getIn(['watchdogs', 'captureRate']),
filters: state.getIn([ 'filters', 'appliedFilter' ]),
sessionsLoading: state.getIn([ 'sessions', 'fetchLiveListRequest', 'loading' ]),
}), {
fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab, fetchSessionList
fetchWatchdogStatus, clearEvents, fetchSessionList
})(SessionsMenu);

View file

@ -1,23 +1,19 @@
import { connect } from 'react-redux';
import withSiteIdRouter from 'HOCs/withSiteIdRouter';
import withPermissions from 'HOCs/withPermissions'
import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo";
import { getRE } from 'App/utils';
import { fetchBookmarks } from "Duck/errors";
import { UNRESOLVED, RESOLVED, IGNORED, BOOKMARK } from "Types/errorInfo";
import { fetchBookmarks, editOptions } from "Duck/errors";
import { applyFilter } from 'Duck/filters';
import { fetchList as fetchSlackList } from 'Duck/integrations/slack';
import { errors as errorsRoute, isRoute } from "App/routes";
import EventFilter from 'Components/BugFinder/EventFilter';
import DateRange from 'Components/BugFinder/DateRange';
import withPageTitle from 'HOCs/withPageTitle';
import { SavedSearchList } from 'UI';
import cn from 'classnames';
import List from './List/List';
import ErrorInfo from './Error/ErrorInfo';
import Header from './Header';
import SideMenuSection from './SideMenu/SideMenuSection';
import SideMenuHeader from './SideMenu/SideMenuHeader';
import SideMenuDividedItem from './SideMenu/SideMenuDividedItem';
const ERRORS_ROUTE = errorsRoute();
@ -39,44 +35,26 @@ function getStatusLabel(status) {
@withSiteIdRouter
@connect(state => ({
list: state.getIn([ "errors", "list" ]),
status: state.getIn([ "errors", "options", "status" ]),
}), {
fetchBookmarks,
applyFilter,
fetchSlackList,
editOptions,
})
@withPageTitle("Errors - OpenReplay")
export default class Errors extends React.PureComponent {
state = {
status: UNRESOLVED,
bookmarksActive: false,
currentList: this.props.list.filter(e => e.status === UNRESOLVED),
filter: '',
constructor(props) {
super(props)
this.state = {
filter: '',
}
}
componentDidMount() {
this.props.fetchSlackList(); // Delete after implementing cache
}
onFilterChange = ({ target: { value } }) => this.setState({ filter: value })
componentDidUpdate(prevProps, prevState) {
const { bookmarksActive, status, filter } = this.state;
const { list } = this.props;
if (prevProps.list !== list
|| prevState.status !== status
|| prevState.bookmarksActive !== bookmarksActive
|| prevState.filter !== filter) {
const unfiltered = bookmarksActive
? list
: list.filter(e => e.status === status);
const filterRE = getRE(filter);
this.setState({
currentList: unfiltered
.filter(e => filterRE.test(e.name) || filterRE.test(e.message)),
})
}
}
ensureErrorsPage() {
const { history } = this.props;
if (!isRoute(ERRORS_ROUTE, history.location.pathname)) {
@ -85,22 +63,11 @@ export default class Errors extends React.PureComponent {
}
onStatusItemClick = ({ key }) => {
if (this.state.bookmarksActive) {
this.props.applyFilter();
}
this.setState({
status: key,
bookmarksActive: false,
});
this.ensureErrorsPage();
this.props.editOptions({ status: key });
}
onBookmarksClick = () => {
this.setState({
bookmarksActive: true,
});
this.props.fetchBookmarks();
this.ensureErrorsPage();
this.props.editOptions({ status: BOOKMARK });
}
@ -110,12 +77,14 @@ export default class Errors extends React.PureComponent {
match: {
params: { errorId }
},
status,
list,
history,
} = this.props;
const { status, bookmarksActive, currentList } = this.state;
return (
<div className="page-margin container-90" >
<div className="side-menu">
<div className={cn("side-menu", {'disabled' : !isRoute(ERRORS_ROUTE, history.location.pathname)})}>
<SideMenuSection
title="Errors"
onItemClick={this.onStatusItemClick}
@ -137,14 +106,14 @@ export default class Errors extends React.PureComponent {
icon: "ban",
label: getStatusLabel(IGNORED),
active: status === IGNORED,
}
}
]}
/>
<SideMenuDividedItem
className="mt-3 mb-4"
iconName="star"
title="Bookmarks"
active={ bookmarksActive }
active={ status === BOOKMARK }
onClick={ this.onBookmarksClick }
/>
</div>
@ -154,8 +123,8 @@ export default class Errors extends React.PureComponent {
<>
<div className="mb-5 flex">
<Header
text={ bookmarksActive ? "Bookmarks" : getStatusLabel(status) }
count={ currentList.size }
text={ status === BOOKMARK ? "Bookmarks" : getStatusLabel(status) }
count={ list.size }
/>
<div className="ml-3 flex items-center">
<span className="mr-2 color-gray-medium">Seen in</span>
@ -164,12 +133,11 @@ export default class Errors extends React.PureComponent {
</div>
<List
status={ status }
list={ currentList }
onFilterChange={this.onFilterChange}
list={ list }
/>
</>
:
<ErrorInfo errorId={ errorId } list={ currentList } />
<ErrorInfo errorId={ errorId } list={ list } />
}
</div>
</div>

View file

@ -1,53 +1,62 @@
import cn from 'classnames';
import { connect } from 'react-redux';
import { Set, List as ImmutableList } from "immutable";
import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain } from 'UI';
import { merge, resolve, unresolve, ignore, updateCurrentPage } from "Duck/errors";
import { NoContent, Loader, Checkbox, LoadMoreButton, IconButton, Input, DropdownPlain, Pagination } from 'UI';
import { merge, resolve, unresolve, ignore, updateCurrentPage, editOptions } from "Duck/errors";
import { applyFilter } from 'Duck/filters';
import { IGNORED, RESOLVED, UNRESOLVED } from 'Types/errorInfo';
import SortDropdown from 'Components/BugFinder/Filters/SortDropdown';
import Divider from 'Components/Errors/ui/Divider';
import ListItem from './ListItem/ListItem';
import { debounce } from 'App/utils';
const PER_PAGE = 5;
const DEFAULT_SORT = 'lastOccurrence';
const DEFAULT_ORDER = 'desc';
const PER_PAGE = 10;
const sortOptionsMap = {
'lastOccurrence-desc': 'Last Occurrence',
'firstOccurrence-desc': 'First Occurrence',
'sessions-asc': 'Sessions Ascending',
'sessions-desc': 'Sessions Descending',
'users-asc': 'Users Ascending',
'users-desc': 'Users Descending',
'occurrence-desc': 'Last Occurrence',
'occurrence-desc': 'First Occurrence',
'sessions-asc': 'Sessions Ascending',
'sessions-desc': 'Sessions Descending',
'users-asc': 'Users Ascending',
'users-desc': 'Users Descending',
};
const sortOptions = Object.entries(sortOptionsMap)
.map(([ value, text ]) => ({ value, text }));
@connect(state => ({
loading: state.getIn([ "errors", "loading" ]),
resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) ||
state.getIn(["errors", "unresolve", "loading"]),
ignoreLoading: state.getIn([ "errors", "ignore", "loading" ]),
mergeLoading: state.getIn([ "errors", "merge", "loading" ]),
currentPage: state.getIn(["errors", "currentPage"]),
currentPage: state.getIn(["errors", "currentPage"]),
total: state.getIn([ 'errors', 'totalCount' ]),
sort: state.getIn([ 'errors', 'options', 'sort' ]),
order: state.getIn([ 'errors', 'options', 'order' ]),
query: state.getIn([ "errors", "options", "query" ]),
}), {
merge,
resolve,
unresolve,
ignore,
applyFilter,
updateCurrentPage,
updateCurrentPage,
editOptions,
})
export default class List extends React.PureComponent {
state = {
checkedAll: false,
checkedIds: Set(),
sort: {}
constructor(props) {
super(props)
this.state = {
checkedAll: false,
checkedIds: Set(),
query: props.query,
}
this.debounceFetch = debounce(this.props.editOptions, 1000);
}
componentDidMount() {
this.props.applyFilter({ sort: DEFAULT_SORT, order: DEFAULT_ORDER, events: ImmutableList(), filters: ImmutableList() });
if (this.props.list.size === 0) {
this.props.applyFilter({ });
}
}
check = ({ errorId }) => {
@ -111,8 +120,14 @@ export default class List extends React.PureComponent {
writeOption = (e, { name, value }) => {
const [ sort, order ] = value.split('-');
const sign = order === 'desc' ? -1 : 1;
this.setState({ sort: { sort, order }})
if (name === 'sort') {
this.props.editOptions({ sort, order });
}
}
onQueryChange = (e, { value }) => {
this.setState({ query: value });
this.debounceFetch({ query: value });
}
render() {
@ -123,19 +138,18 @@ export default class List extends React.PureComponent {
ignoreLoading,
resolveToggleLoading,
mergeLoading,
onFilterChange,
currentPage,
currentPage,
total,
sort,
order,
} = this.props;
const {
checkedAll,
checkedIds,
sort
query,
} = this.state;
const someLoading = loading || ignoreLoading || resolveToggleLoading || mergeLoading;
const currentCheckedIds = this.currentCheckedIds();
const displayedCount = Math.min(currentPage * PER_PAGE, list.size);
let _list = sort.sort ? list.sortBy(i => i[sort.sort]) : list;
_list = sort.order === 'desc' ? _list.reverse() : _list;
return (
<div className="bg-white p-5 border-radius-3 thin-gray-border">
@ -182,33 +196,35 @@ export default class List extends React.PureComponent {
}
</div>
<div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium">Sort By</span>
<span className="mr-2 color-gray-medium">Sort By</span>
<DropdownPlain
name="type"
options={ sortOptions }
onChange={ this.writeOption }
/>
<Input
defaultValue={ `${sort}-${order}` }
name="sort"
options={ sortOptions }
onChange={ this.writeOption }
/>
<Input
style={{ width: '350px'}}
className="input-small ml-3"
placeholder="Filter by Name or Message"
icon="search"
iconPosition="left"
name="filter"
onChange={ onFilterChange }
/>
</div>
</div>
<Divider />
<NoContent
title="No Errors Found!"
subtext="Please try to change your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
>
<Loader loading={ loading }>
{ _list.take(displayedCount).map(e =>
<>
className="input-small ml-3"
placeholder="Filter by Name or Message"
icon="search"
iconPosition="left"
name="filter"
onChange={ this.onQueryChange }
value={query}
/>
</div>
</div>
<Divider />
<NoContent
title="No Errors Found!"
subtext="Please try to change your search parameters."
icon="exclamation-circle"
show={ !loading && list.size === 0}
>
<Loader loading={ loading }>
{ list.map(e =>
<div key={e.errorId}>
<ListItem
disabled={someLoading}
key={e.errorId}
@ -217,16 +233,19 @@ export default class List extends React.PureComponent {
onCheck={ this.check }
/>
<Divider/>
</>
</div>
)}
<LoadMoreButton
className="mt-3"
displayedCount={displayedCount}
totalCount={list.size}
onClick={this.addPage}
/>
</Loader>
</NoContent>
<div className="w-full flex items-center justify-center mt-4">
<Pagination
page={currentPage}
totalPages={Math.ceil(total / PER_PAGE)}
onPageChange={(page) => this.props.updateCurrentPage(page)}
limit={PER_PAGE}
debounceRequest={500}
/>
</div>
</Loader>
</NoContent>
</div>
);
}

View file

@ -6,9 +6,19 @@ import { connect } from 'react-redux';
import { setActiveStages } from 'Duck/funnels';
import { Styles } from '../../Dashboard/Widgets/common';
import { numberWithCommas } from 'App/utils'
import { truncate } from 'App/utils'
const MIN_BAR_HEIGHT = 20;
function CustomTick(props) {
const { x, y, payload } = props;
return (
<g transform={`translate(${x},${y})`}>
<text x={0} y={0} dy={16} fontSize={12} textAnchor="middle" fill="#666">{payload.value}</text>
</g>
);
}
function FunnelGraph(props) {
const { data, activeStages, funnelId, liveFilters } = props;
const [activeIndex, setActiveIndex] = useState(activeStages)
@ -118,13 +128,29 @@ function FunnelGraph(props) {
)
}
const CustomTooltip = ({ active, payload, msg = '' }) => {
const CustomTooltip = (props) => {
const { payload } = props;
if (payload.length === 0) return null;
const { value, headerText } = payload[0].payload;
// const value = payload[0].payload.value;
if (!value) return null;
return (
<div className="rounded border bg-white p-2">
<p className="text-sm">{msg}</p>
<div className="rounded border bg-white p-2">
<div>{headerText}</div>
{value.map(i => (
<div className="text-sm ml-2">{truncate(i, 30)}</div>
))}
</div>
);
)
};
// const CustomTooltip = ({ active, payload, msg = '' }) => {
// return (
// <div className="rounded border bg-white p-2">
// <p className="text-sm">{msg}</p>
// </div>
// );
// };
const TEMP = {}
@ -152,7 +178,9 @@ function FunnelGraph(props) {
background={'transparent'}
>
<CartesianGrid strokeDasharray="1 3" stroke="#BBB" vertical={false} />
{activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />}
{/* {activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} */}
<Tooltip cursor={{ fill: 'transparent' }} content={CustomTooltip} />
<Bar
dataKey="sessionsCount"
onClick={handleClick}
@ -210,7 +238,8 @@ function FunnelGraph(props) {
dataKey="label"
strokeWidth={0}
interval={0}
tick ={{ fill: '#666', fontSize: 12 }}
// tick ={{ fill: '#666', fontSize: 12 }}
tick={<CustomTick />}
xAxisId={0}
/>
{/* <XAxis

View file

@ -1,7 +1,7 @@
import React, { useEffect, useState } from 'react';
import { Icon, BackLink, IconButton, Dropdown, Popup, TextEllipsis, Button } from 'UI';
import { remove as deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered } from 'Duck/funnels';
import { editFilter, addFilter } from 'Duck/funnels';
import { editFilter, refresh, addFilter } from 'Duck/funnels';
import DateRange from 'Shared/DateRange';
import { connect } from 'react-redux';
import { confirm } from 'UI/Confirmation';
@ -19,15 +19,11 @@ const Info = ({ label = '', value = '', className = 'mx-4' }) => {
const FunnelHeader = (props) => {
const { funnel, insights, funnels, onBack, funnelId, showFilters = false, renameHandler } = props;
const [showSaveModal, setShowSaveModal] = useState(false)
const writeOption = (e, { name, value }) => {
props.fetch(value)
props.fetchInsights(value, {})
props.fetchIssuesFiltered(value, {})
props.fetchSessionsFiltered(value, {})
props.redirect(value)
props.fetch(value).then(() => props.refresh(value))
}
const deleteFunnel = async (e, funnel) => {
@ -44,11 +40,12 @@ const FunnelHeader = (props) => {
}
const onDateChange = (e) => {
props.editFilter(e, funnel.funnelId);
props.editFilter(e, funnelId);
}
const options = funnels.map(({ funnelId, name }) => ({ text: name, value: funnelId })).toJS();
const selectedFunnel = funnels.filter(i => i.funnelId === parseInt(funnelId)).first() || {};
const eventsCount = funnel.filter.filters.filter(i => i.isEvent).size;
return (
<div>
@ -75,7 +72,7 @@ const FunnelHeader = (props) => {
selectOnBlur={false}
icon={ <Icon name="chevron-down" color="gray-dark" size="14" className={stl.dropdownIcon} /> }
/>
<Info label="Events" value={funnel.filter.filters.size} />
<Info label="Events" value={eventsCount} />
<span>-</span>
<Button plain onClick={props.toggleFilters}>{ showFilters ? 'HIDE' : 'EDIT FUNNEL' }</Button>
<Info label="Sessions" value={insights.sessionsCount} />
@ -113,4 +110,4 @@ const FunnelHeader = (props) => {
export default connect(state => ({
funnel: state.getIn([ 'funnels', 'instance' ]),
}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered })(FunnelHeader)
}), { editFilter, deleteFunnel, fetch, fetchInsights, fetchIssuesFiltered, fetchSessionsFiltered, refresh })(FunnelHeader)

View file

@ -8,14 +8,14 @@ const DOCUMENTATION = 'NPM';
// const SEGMENT = 'SEGMENT';
// const GOOGLE_TAG = 'GOOGLE TAG';
const TABS = [
{ key: PROJECT, text: PROJECT },
{ key: DOCUMENTATION, text: DOCUMENTATION },
{ key: PROJECT, text: PROJECT },
// { key: SEGMENT, text: SEGMENT },
// { key: GOOGLE_TAG, text: GOOGLE_TAG }
];
class TrackingCodeModal extends React.PureComponent {
state = { copied: false, changed: false, activeTab: PROJECT };
state = { copied: false, changed: false, activeTab: DOCUMENTATION };
setActiveTab = (tab) => {
this.setState({ activeTab: tab });

View file

@ -9,6 +9,7 @@ import Controls from './Controls';
import Overlay from './Overlay';
import stl from './player.css';
import EventsToggleButton from '../../Session/EventsToggleButton';
import { updateLastPlayedSession } from 'Duck/sessions';
@connectPlayer(state => ({
live: state.live,
@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton';
return {
fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]),
nextId: state.getIn([ 'sessions', 'nextId' ]),
sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]),
closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])),
}
}, {
hideTargetDefiner,
fullscreenOff,
updateLastPlayedSession,
})
export default class Player extends React.PureComponent {
screenWrapper = React.createRef();
componentDidMount() {
this.props.updateLastPlayedSession(this.props.sessionId);
if (this.props.closedLive) return;
const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture

View file

@ -36,7 +36,7 @@ function FilterSeries(props: Props) {
const onAddFilter = (filter) => {
filter.value = [""]
if (filter.hasOwnProperty('filters')) {
if (filter.hasOwnProperty('filters') && Array.isArray(filter.filters)) {
filter.filters = filter.filters.map(i => ({ ...i, value: [""] }))
}
props.addSeriesFilterFilter(seriesIndex, filter);

View file

@ -1,7 +1,7 @@
import React, { useEffect } from 'react';
import { fetchLiveList } from 'Duck/sessions';
import { connect } from 'react-redux';
import { NoContent, Loader, LoadMoreButton } from 'UI';
import { NoContent, Loader, LoadMoreButton, Pagination } from 'UI';
import { List, Map } from 'immutable';
import SessionItem from 'Shared/SessionItem';
import withPermissions from 'HOCs/withPermissions'
@ -12,11 +12,11 @@ import { addFilterByKeyAndValue, updateCurrentPage, updateSort } from 'Duck/live
import DropdownPlain from 'Shared/DropdownPlain';
import SortOrderButton from 'Shared/SortOrderButton';
import { TimezoneDropdown } from 'UI';
import { capitalize } from 'App/utils';
import { capitalize, sliceListPerPage } from 'App/utils';
import LiveSessionReloadButton from 'Shared/LiveSessionReloadButton';
const AUTOREFRESH_INTERVAL = .5 * 60 * 1000
const PER_PAGE = 20;
const PER_PAGE = 10;
interface Props {
loading: Boolean,
@ -42,9 +42,8 @@ function LiveSessionList(props: Props) {
text: capitalize(i), value: i
})).toJS();
const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size);
const addPage = () => props.updateCurrentPage(props.currentPage + 1)
// const displayedCount = Math.min(currentPage * PER_PAGE, sessions.size);
// const addPage = () => props.updateCurrentPage(props.currentPage + 1)
useEffect(() => {
if (filters.size === 0) {
@ -135,6 +134,7 @@ function LiveSessionList(props: Props) {
<SortOrderButton onChange={(state) => props.updateSort({ order: state })} sortOrder={sort.order} />
</div>
</div>
<NoContent
title={"No live sessions."}
subtext={
@ -147,9 +147,9 @@ function LiveSessionList(props: Props) {
show={ !loading && sessions && sessions.size === 0}
>
<Loader loading={ loading }>
{sessions && sessions.sortBy(i => i.metadata[sort.field]).update(list => {
{sessions && sliceListPerPage(sessions.sortBy(i => i.metadata[sort.field]).update(list => {
return sort.order === 'desc' ? list.reverse() : list;
}).take(displayedCount).map(session => (
}), currentPage - 1).map(session => (
<SessionItem
key={ session.sessionId }
session={ session }
@ -160,12 +160,14 @@ function LiveSessionList(props: Props) {
/>
))}
<LoadMoreButton
className="my-6"
displayedCount={displayedCount}
totalCount={sessions.size}
onClick={addPage}
<div className="w-full flex items-center justify-center py-6">
<Pagination
page={currentPage}
totalPages={Math.ceil(sessions.size / PER_PAGE)}
onPageChange={(page) => props.updateCurrentPage(page)}
limit={PER_PAGE}
/>
</div>
</Loader>
</NoContent>
</div>

View file

@ -3,29 +3,25 @@ import cn from 'classnames';
import {
Link,
Icon,
OsIcon,
BrowserIcon,
CountryFlag,
Avatar,
TextEllipsis,
Label,
} from 'UI';
import { deviceTypeIcon } from 'App/iconNames';
import { toggleFavorite, setSessionPath } from 'Duck/sessions';
import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes';
import { durationFormatted, formatTimeOrDate } from 'App/date';
import stl from './sessionItem.css';
import LiveTag from 'Shared/LiveTag';
import Bookmark from 'Shared/Bookmark';
import Counter from './Counter'
import { withRouter } from 'react-router-dom';
import SessionMetaList from './SessionMetaList';
import ErrorBars from './ErrorBars';
import { assist as assistRoute, liveSession, isRoute } from "App/routes";
import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes";
import { capitalize } from 'App/utils';
const ASSIST_ROUTE = assistRoute();
const ASSIST_LIVE_SESSION = liveSession()
const SESSIONS_ROUTE = sessionsRoute();
// const Label = ({ label = '', color = 'color-gray-medium'}) => (
// <div className={ cn('font-light text-sm', color)}>{label}</div>
@ -69,10 +65,13 @@ export default class SessionItem extends React.PureComponent {
disableUser = false,
metaList = [],
showActive = false,
lastPlayedSessionId,
} = this.props;
const formattedDuration = durationFormatted(duration);
const hasUserId = userId || userAnonymousId;
const isSessions = isRoute(SESSIONS_ROUTE, this.props.location.pathname);
const isAssist = isRoute(ASSIST_ROUTE, this.props.location.pathname) || isRoute(ASSIST_LIVE_SESSION, this.props.location.pathname);
const isLastPlayed = lastPlayedSessionId === sessionId;
const _metaList = Object.keys(metadata).filter(i => metaList.includes(i)).map(key => {
const value = metadata[key];
@ -125,7 +124,7 @@ export default class SessionItem extends React.PureComponent {
</span>
</div>
</div>
{ !isAssist && (
{ isSessions && (
<div style={{ width: "10%"}} className="self-center px-2 flex items-center">
<ErrorBars count={issueTypes.length} />
</div>
@ -139,6 +138,15 @@ export default class SessionItem extends React.PureComponent {
</Label>
)}
<div className={ stl.playLink } id="play-button" data-viewed={ viewed }>
{ isSessions && (
<div className="mr-4 flex-shrink-0 w-24">
{ isLastPlayed && (
<Label className="bg-gray-lightest p-1 px-2 rounded-lg">
<span className="color-gray-medium text-xs" style={{ whiteSpace: 'nowrap'}}>LAST PLAYED</span>
</Label>
)}
</div>
)}
<Link to={ isAssist ? liveSessionRoute(sessionId) : sessionRoute(sessionId) }>
<Icon name={ !viewed && !isAssist ? 'play-fill' : 'play-circle-light' } size="42" color={isAssist ? "tealx" : "teal"} />
</Link>

View file

@ -10,12 +10,12 @@ import cn from 'classnames';
const PROJECT = 'Using Script';
const DOCUMENTATION = 'Using NPM';
const TABS = [
{ key: DOCUMENTATION, text: DOCUMENTATION },
{ key: PROJECT, text: PROJECT },
{ key: DOCUMENTATION, text: DOCUMENTATION }
];
class TrackingCodeModal extends React.PureComponent {
state = { copied: false, changed: false, activeTab: PROJECT };
state = { copied: false, changed: false, activeTab: DOCUMENTATION };
setActiveTab = (tab) => {
this.setState({ activeTab: tab });

View file

@ -21,7 +21,7 @@ function DropdownPlain({ name, label, options, onChange, defaultValue, wrapperSt
options={ options }
onChange={ onChange }
defaultValue={ defaultValue || options[ 0 ].value }
icon={null}
// icon={null}
disabled={disabled}
icon={ <Icon name="chevron-down" color="gray-dark" size="14" className={stl.dropdownIcon} /> }
/>

View file

@ -0,0 +1,77 @@
import React from 'react'
import { Icon } from 'UI'
import cn from 'classnames'
import { debounce } from 'App/utils';
import { Tooltip } from 'react-tippy';
// Contract for the Pagination control.
interface Props {
page: number // currently active page (1-based, controlled by the parent)
totalPages: number // total number of pages available
onPageChange: (page: number) => void // invoked (possibly debounced) when the user selects a valid page
limit?: number // page-size hint from the caller; not read by this component's render — TODO confirm intended use
debounceRequest?: number // debounce delay in ms applied before onPageChange fires (0 = immediate)
}
/**
 * Generic pager: previous/next buttons plus a numeric input, with optional
 * debouncing of the upstream onPageChange callback.
 *
 * Local state mirrors the controlled `page` prop so the input stays
 * responsive while requests are debounced.
 */
export default function Pagination(props: Props) {
  const { page, totalPages, onPageChange, limit = 5, debounceRequest = 0 } = props;
  const [currentPage, setCurrentPage] = React.useState(page);

  // Sync local state when the controlled `page` prop changes.
  // Fix: this is a side effect, so it belongs in useEffect — the original
  // used useMemo, which runs during render and must not set state.
  React.useEffect(() => {
    setCurrentPage(page);
  }, [page]);

  // One stable debounced wrapper for the component's lifetime, so rapid
  // clicks/typing collapse into a single upstream notification.
  const debounceChange = React.useCallback(debounce(onPageChange, debounceRequest), []);

  const changePage = (nextPage: number) => {
    // Out-of-range values — including NaN from a cleared number input —
    // fail the bounds check and are ignored.
    if (nextPage > 0 && nextPage <= totalPages) {
      setCurrentPage(nextPage);
      debounceChange(nextPage);
    }
  }

  const isFirstPage = currentPage === 1;
  const isLastPage = currentPage === totalPages;

  return (
    <div className="flex items-center">
      <Tooltip
        arrow
        sticky
        title="Previous Page"
        trigger="mouseenter"
        hideOnClick={true}
      >
        <button
          className={cn("py-2 px-3", { "opacity-50 cursor-default": isFirstPage })}
          disabled={isFirstPage}
          onClick={() => changePage(currentPage - 1)}
        >
          <Icon name="chevron-left" size="18" color={isFirstPage ? 'gray-medium' : 'teal'} />
        </button>
      </Tooltip>
      <span className="mr-2 color-gray-medium">Page</span>
      <input
        type="number"
        className={cn("py-1 px-2 bg-white border border-gray-light rounded w-16", { "opacity-50 cursor-default": totalPages === 1 })}
        value={currentPage}
        min={1}
        max={totalPages}
        onChange={(e) => changePage(parseInt(e.target.value))}
      />
      <span className="mx-3 color-gray-medium">of</span>
      <span >{totalPages}</span>

      <Tooltip
        arrow
        sticky
        title="Next Page"
        trigger="mouseenter"
        hideOnClick={true}
      >
        <button
          className={cn("py-2 px-3", { "opacity-50 cursor-default": isLastPage })}
          disabled={isLastPage}
          onClick={() => changePage(currentPage + 1)}
        >
          <Icon name="chevron-right" size="18" color={isLastPage ? 'gray-medium' : 'teal'} />
        </button>
      </Tooltip>
    </div>
  )
}

View file

@ -0,0 +1 @@
export { default } from './Pagination';

View file

@ -55,5 +55,6 @@ export { default as HighlightCode } from './HighlightCode';
export { default as NoPermission } from './NoPermission';
export { default as NoSessionPermission } from './NoSessionPermission';
export { default as HelpText } from './HelpText';
export { default as Pagination } from './Pagination';
export { Input, Modal, Form, Message, Card } from 'semantic-ui-react';

View file

@ -1,4 +1,4 @@
import { FilterKey } from 'Types/filter/filterType';
import { FilterKey, IssueType } from 'Types/filter/filterType';
export const options = [
{ key: 'on', text: 'on', value: 'on' },
@ -93,18 +93,18 @@ export const methodOptions = [
]
export const issueOptions = [
{ text: 'Click Rage', value: 'click_rage' },
{ text: 'Dead Click', value: 'dead_click' },
{ text: 'Excessive Scrolling', value: 'excessive_scrolling' },
{ text: 'Bad Request', value: 'bad_request' },
{ text: 'Missing Resource', value: 'missing_resource' },
{ text: 'Memory', value: 'memory' },
{ text: 'CPU', value: 'cpu' },
{ text: 'Slow Resource', value: 'slow_resource' },
{ text: 'Slow Page Load', value: 'slow_page_load' },
{ text: 'Crash', value: 'crash' },
{ text: 'Custom', value: 'custom' },
{ text: 'JS Exception', value: 'js_exception' },
{ text: 'Click Rage', value: IssueType.CLICK_RAGE },
{ text: 'Dead Click', value: IssueType.DEAD_CLICK },
{ text: 'Excessive Scrolling', value: IssueType.EXCESSIVE_SCROLLING },
{ text: 'Bad Request', value: IssueType.BAD_REQUEST },
{ text: 'Missing Resource', value: IssueType.MISSING_RESOURCE },
{ text: 'Memory', value: IssueType.MEMORY },
{ text: 'CPU', value: IssueType.CPU },
{ text: 'Slow Resource', value: IssueType.SLOW_RESOURCE },
{ text: 'Slow Page Load', value: IssueType.SLOW_PAGE_LOAD },
{ text: 'Crash', value: IssueType.CRASH },
{ text: 'Custom', value: IssueType.CUSTOM },
{ text: 'Error', value: IssueType.JS_EXCEPTION },
]
export default {

View file

@ -1,13 +1,18 @@
import { List, Map } from 'immutable';
import { clean as cleanParams } from 'App/api_client';
import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED } from 'Types/errorInfo';
import ErrorInfo, { RESOLVED, UNRESOLVED, IGNORED, BOOKMARK } from 'Types/errorInfo';
import { createFetch, fetchListType, fetchType } from './funcTools/crud';
import { createRequestReducer, ROOT_KEY } from './funcTools/request';
import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools';
import { reduceThenFetchResource } from './search'
const name = "error";
const idKey = "errorId";
const PER_PAGE = 10;
const DEFAULT_SORT = 'occurrence';
const DEFAULT_ORDER = 'desc';
const EDIT_OPTIONS = `${name}/EDIT_OPTIONS`;
const FETCH_LIST = fetchListType(name);
const FETCH = fetchType(name);
const FETCH_NEW_ERRORS_COUNT = fetchType('errors/FETCH_NEW_ERRORS_COUNT');
@ -18,6 +23,7 @@ const MERGE = "errors/MERGE";
const TOGGLE_FAVORITE = "errors/TOGGLE_FAVORITE";
const FETCH_TRACE = "errors/FETCH_TRACE";
const UPDATE_CURRENT_PAGE = "errors/UPDATE_CURRENT_PAGE";
const UPDATE_KEY = `${name}/UPDATE_KEY`;
function chartWrapper(chart = []) {
return chart.map(point => ({ ...point, count: Math.max(point.count, 0) }));
@ -35,13 +41,23 @@ const initialState = Map({
instanceTrace: List(),
stats: Map(),
sourcemapUploaded: true,
currentPage: 1,
currentPage: 1,
options: Map({
sort: DEFAULT_SORT,
order: DEFAULT_ORDER,
status: UNRESOLVED,
query: '',
}),
// sort: DEFAULT_SORT,
// order: DEFAULT_ORDER,
});
function reducer(state = initialState, action = {}) {
let updError;
switch (action.type) {
case EDIT_OPTIONS:
return state.mergeIn(["options"], action.instance);
case success(FETCH):
return state.set("instance", ErrorInfo(action.data));
case success(FETCH_TRACE):
@ -69,8 +85,10 @@ function reducer(state = initialState, action = {}) {
return state.update("list", list => list.filter(e => !ids.includes(e.errorId)));
case success(FETCH_NEW_ERRORS_COUNT):
return state.set('stats', action.data);
case UPDATE_CURRENT_PAGE:
return state.set('currentPage', action.page);
case UPDATE_KEY:
return state.set(action.key, action.value);
case UPDATE_CURRENT_PAGE:
return state.set('currentPage', action.page);
}
return state;
}
@ -106,14 +124,32 @@ export function fetchTrace(id) {
}
}
export function fetchList(params = {}, clear = false) {
return {
types: array(FETCH_LIST),
call: client => client.post('/errors/search', params),
clear,
params: cleanParams(params),
};
}
// Thunk: fetch the errors list for the page currently stored in state,
// merging the persisted list options (sort/order/status/query) into the
// request body. `clear` tells the reducer to replace rather than append.
export const fetchList = (params = {}, clear = false) => (dispatch, getState) => {
  const state = getState();

  // Pagination is driven by the store, not by the caller.
  params.page = state.getIn(['errors', 'currentPage']);
  params.limit = PER_PAGE;

  const opts = state.getIn(['errors', 'options']).toJS();
  // "bookmark" is a pseudo-status: translate it into the bookmarked flag
  // and widen the status filter to all.
  if (opts.status === BOOKMARK) {
    opts.bookmarked = true;
    opts.status = 'all';
  }

  return dispatch({
    types: array(FETCH_LIST),
    call: client => client.post('/errors/search', { ...params, ...opts }),
    clear,
    params: cleanParams(params),
  });
};
// export function fetchList(params = {}, clear = false) {
// return {
// types: array(FETCH_LIST),
// call: client => client.post('/errors/search', params),
// clear,
// params: cleanParams(params),
// };
// }
export function fetchBookmarks() {
return {
@ -169,9 +205,12 @@ export function fetchNewErrorsCount(params = {}) {
}
}
export function updateCurrentPage(page) {
return {
type: 'errors/UPDATE_CURRENT_PAGE',
export const updateCurrentPage = reduceThenFetchResource((page) => ({
type: UPDATE_CURRENT_PAGE,
page,
};
}
}));
export const editOptions = reduceThenFetchResource((instance) => ({
type: EDIT_OPTIONS,
instance
}));

View file

@ -7,7 +7,7 @@ import SavedFilter from 'Types/filter/savedFilter';
import { errors as errorsRoute, isRoute } from "App/routes";
import { fetchList as fetchSessionList } from './sessions';
import { fetchList as fetchErrorsList } from './errors';
import { FilterCategory, FilterKey } from 'Types/filter/filterType';
import { FilterCategory, FilterKey, IssueType } from 'Types/filter/filterType';
import { filtersMap, liveFiltersMap, generateFilterOptions, generateLiveFilterOptions } from 'Types/filter/newFilter';
const ERRORS_ROUTE = errorsRoute();
@ -28,6 +28,8 @@ const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`;
const UPDATE = `${name}/UPDATE`;
const APPLY = `${name}/APPLY`;
const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`;
const UPDATE_CURRENT_PAGE = `${name}/UPDATE_CURRENT_PAGE`;
const SET_ACTIVE_TAB = `${name}/SET_ACTIVE_TAB`;
const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS';
@ -49,6 +51,8 @@ const initialState = Map({
instance: new Filter({ filters: [] }),
savedSearch: new SavedFilter({}),
filterSearchList: {},
currentPage: 1,
activeTab: {name: 'All', type: 'all' },
});
// Metric - Series - [] - filters
@ -62,7 +66,7 @@ function reducer(state = initialState, action = {}) {
case APPLY:
return action.fromUrl
? state.set('instance', Filter(action.filter))
: state.mergeIn(['instance'], action.filter);
: state.mergeIn(['instance'], action.filter).set('currentPage', 1);
case success(FETCH):
return state.set("instance", action.data);
case success(FETCH_LIST):
@ -83,6 +87,10 @@ function reducer(state = initialState, action = {}) {
return state.set('savedSearch', action.filter);
case EDIT_SAVED_SEARCH:
return state.mergeIn([ 'savedSearch' ], action.instance);
case UPDATE_CURRENT_PAGE:
return state.set('currentPage', action.page);
case SET_ACTIVE_TAB:
return state.set('activeTab', action.tab).set('currentPage', 1);
}
return state;
}
@ -118,10 +126,24 @@ export const filterMap = ({category, value, key, operator, sourceOperator, sourc
filters: filters ? filters.map(filterMap) : [],
});
const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
export const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
dispatch(actionCreator(...args));
const filter = getState().getIn([ 'search', 'instance']).toData();
const activeTab = getState().getIn([ 'search', 'activeTab']);
if (activeTab.type !== 'all' && activeTab.type !== 'bookmark') {
const tmpFilter = filtersMap[FilterKey.ISSUE];
tmpFilter.value = [activeTab.type]
filter.filters = filter.filters.concat(tmpFilter)
}
if (activeTab.type === 'bookmark') {
filter.bookmarked = true
}
filter.filters = filter.filters.map(filterMap);
filter.limit = 10;
filter.page = getState().getIn([ 'search', 'currentPage']);
return isRoute(ERRORS_ROUTE, window.location.pathname)
? dispatch(fetchErrorsList(filter))
@ -133,6 +155,11 @@ export const edit = reduceThenFetchResource((instance) => ({
instance,
}));
export const setActiveTab = reduceThenFetchResource((tab) => ({
type: SET_ACTIVE_TAB,
tab
}));
export const remove = (id) => (dispatch, getState) => {
return dispatch({
types: REMOVE.array,
@ -152,6 +179,11 @@ export const applyFilter = reduceThenFetchResource((filter, fromUrl=false) => ({
fromUrl,
}));
export const updateCurrentPage = reduceThenFetchResource((page) => ({
type: UPDATE_CURRENT_PAGE,
page,
}));
export const applySavedSearch = (filter) => (dispatch, getState) => {
dispatch(edit({ filters: filter ? filter.filter.filters : [] }));
return dispatch({

View file

@ -7,9 +7,9 @@ import withRequestState, { RequestTypes } from './requestStateCreator';
import { getRE } from 'App/utils';
import { LAST_7_DAYS } from 'Types/app/period';
import { getDateRangeFromValue } from 'App/dateRange';
const name = 'sessions';
const INIT = 'sessions/INIT';
const FETCH_LIST = new RequestTypes('sessions/FETCH_LIST');
const FETCH = new RequestTypes('sessions/FETCH');
const FETCH_FAVORITE_LIST = new RequestTypes('sessions/FETCH_FAVORITE_LIST');
@ -26,6 +26,7 @@ const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW';
const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG';
const SET_TIMELINE_POINTER = 'sessions/SET_TIMELINE_POINTER';
const SET_SESSION_PATH = 'sessions/SET_SESSION_PATH';
const LAST_PLAYED_SESSION_ID = `${name}/LAST_PLAYED_SESSION_ID`;
const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB';
@ -60,6 +61,7 @@ const initialState = Map({
funnelPage: Map(),
timelinePointer: null,
sessionPath: '',
lastPlayedSessionId: null,
});
const reducer = (state = initialState, action = {}) => {
@ -248,11 +250,21 @@ const reducer = (state = initialState, action = {}) => {
return state.set('timelinePointer', action.pointer);
case SET_SESSION_PATH:
return state.set('sessionPath', action.path);
case LAST_PLAYED_SESSION_ID:
return updateListItem(state, action.sessionId, { viewed: true }).set('lastPlayedSessionId', action.sessionId);
default:
return state;
}
};
// Merge `instance` into the session list entry whose sessionId matches;
// returns the state unchanged when no such session is in the list.
function updateListItem(state, sourceSessionId, instance) {
  const idx = state
    .get('list')
    .findIndex(session => session.sessionId === sourceSessionId);
  return idx < 0
    ? state
    : state.updateIn(['list', idx], session => session.merge(instance));
}
export default withRequestState({
_: [ FETCH, FETCH_LIST ],
fetchLiveListRequest: FETCH_LIVE_LIST,
@ -390,4 +402,11 @@ export function setSessionPath(path) {
type: SET_SESSION_PATH,
path
}
}
// Action creator: record `sessionId` as the most recently played session
// (the reducer also marks that list entry as viewed).
export const updateLastPlayedSession = (sessionId) => ({
  type: LAST_PLAYED_SESSION_ID,
  sessionId,
});

View file

@ -31,16 +31,6 @@ export default abstract class BaseScreen {
const screen = document.createElement('div');
setTimeout(function() {
iframe.contentDocument?.addEventListener('mousemove', function() {
overlay.style.display = 'block';
})
overlay.addEventListener('contextmenu', function() {
overlay.style.display = 'none';
})
}, 10)
screen.className = styles.screen;
screen.appendChild(iframe);
screen.appendChild(overlay);
@ -58,6 +48,20 @@ export default abstract class BaseScreen {
// parentElement.onresize = this.scale;
window.addEventListener('resize', this.scale);
this.scale();
/* == For the Inspecting Document content == */
this.overlay.addEventListener('contextmenu', () => {
this.overlay.style.display = 'none'
const doc = this.document
if (!doc) { return }
const returnOverlay = () => {
this.overlay.style.display = 'block'
doc.removeEventListener('mousemove', returnOverlay)
doc.removeEventListener('mouseclick', returnOverlay) // TODO: prevent default in case of input selection
}
doc.addEventListener('mousemove', returnOverlay)
doc.addEventListener('mouseclick', returnOverlay)
})
}
get window(): WindowProxy | null {
@ -70,10 +74,10 @@ export default abstract class BaseScreen {
private boundingRect: DOMRect | null = null;
private getBoundingClientRect(): DOMRect {
//if (this.boundingRect === null) {
return this.boundingRect = this.overlay.getBoundingClientRect(); // expensive operation?
//}
//return this.boundingRect;
if (this.boundingRect === null) {
return this.boundingRect = this.overlay.getBoundingClientRect() // expensive operation?
}
return this.boundingRect
}
getInternalViewportCoordinates({ x, y }: Point): Point {
@ -85,17 +89,22 @@ export default abstract class BaseScreen {
const screenX = (x - overlayX) * scale;
const screenY = (y - overlayY) * scale;
return { x: screenX, y: screenY };
return { x: Math.round(screenX), y: Math.round(screenY) };
}
getCurrentScroll(): Point {
const docEl = this.document?.documentElement
const x = docEl ? docEl.scrollLeft : 0
const y = docEl ? docEl.scrollTop : 0
return { x, y }
}
getInternalCoordinates(p: Point): Point {
const { x, y } = this.getInternalViewportCoordinates(p);
const docEl = this.document?.documentElement
const scrollX = docEl ? docEl.scrollLeft : 0
const scrollY = docEl ? docEl.scrollTop : 0
const sc = this.getCurrentScroll()
return { x: x+scrollX, y: y+scrollY };
return { x: x+sc.x, y: y+sc.y };
}
getElementFromInternalPoint({ x, y }: Point): Element | null {

View file

@ -1,4 +1,5 @@
.screen {
user-select: none;
overflow: hidden;
position: absolute;
transform-origin: left top;

View file

@ -0,0 +1,85 @@
// Freehand annotation overlay used during assist calls: a fixed-position
// <canvas> on which red strokes are drawn, then faded out and cleared a
// few seconds after drawing stops.
export default class AnnotationCanvas {
  readonly canvas: HTMLCanvasElement
  // 2d context; set in mount(), null before that.
  private ctx: CanvasRenderingContext2D | null = null
  // True between start() and stop(), i.e. while a stroke is in progress.
  private painting: boolean = false

  constructor() {
    this.canvas = document.createElement('canvas')
    Object.assign(this.canvas.style, {
      position: "fixed",
      // Pen cursor embedded as a data-URI PNG (hotspot at 0,20), with a
      // crosshair fallback.
      cursor: "url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAAXNSR0IArs4c6QAAAWNJREFUOE+l1D1Lw1AUBuD35Natg5NzaCMRMilINnGok7sguLg4OlRcBTd/hqBVB0ed7KDgIPgXhJoaG10Kgk4a83EkhcYYktimd703z31zzuESSqwGIDs1bRvAIiRcWrZ9ETFUwhJ6XTsDsPH7Le1bz08H42JkGMa09+W2CVhKBmHC7jhYlOgUTPdUEa3Q86+SIDN/j4olf43BtJMFjoJl1AgMUJMUcRInZHT+w7KgYakGoDxVafmue0hBsJeLmaapvPffziFhraDjDMKWZdvHRaNRlCi2mUNHYl55dBwrDysFZWGloTQ2EZTEJoZiTFXVmaos34Ixn9e5qNgCaHR6vW7emcFozNVmN1ERbfb9myww3bVCTK9rPsDrpCh37HnXAC3Ek5lqf9ErM0im1zUG8BmGtCqq4mEIjppoeEESA5g/JIkaLMuv7AVHEgfNohqlU/7Fol3mPodiufvS7Yz7cP4ARjbPWyYPZSMAAAAASUVORK5CYII=') 0 20, crosshair",
      left: 0,
      top: 0,
      //zIndex: 2147483647 - 2,
    })
  }

  isPainting() {
    return this.painting
  }

  // Keep the canvas bitmap sized to its parent element.
  private resizeCanvas = () => {
    if (!this.canvas.parentElement) { return }
    this.canvas.width = this.canvas.parentElement.offsetWidth
    this.canvas.height = this.canvas.parentElement.offsetHeight
  }

  // Last stroke point; used as the segment origin in move().
  private lastPosition: [number, number] = [0,0]

  // Begin a stroke at p. Cancels any pending clear scheduled by a previous
  // fade-out so resumed drawing is not wiped mid-stroke.
  start = (p: [number, number]) => {
    this.painting = true
    this.clrTmID && clearTimeout(this.clrTmID)
    this.lastPosition = p
  }

  // End the current stroke and kick off the fade-out.
  stop = () => {
    if (!this.painting) { return }
    this.painting = false
    this.fadeOut()
  }

  // Draw a segment from the last recorded point to p (red, 8px, rounded).
  move = (p: [number, number]) =>{
    if (!this.ctx || !this.painting) { return }
    this.ctx.globalAlpha = 1.0
    this.ctx.beginPath()
    this.ctx.moveTo(this.lastPosition[0], this.lastPosition[1])
    this.ctx.lineTo(p[0], p[1])
    this.ctx.lineWidth = 8
    this.ctx.lineCap = "round"
    this.ctx.lineJoin = "round"
    this.ctx.strokeStyle = "red"
    this.ctx.stroke()
    this.lastPosition = p
  }

  // Timer handle for the final clearRect of a fade-out; start() clears it.
  clrTmID: ReturnType<typeof setTimeout> | null = null

  // Gradually erase the drawing: every 100ms punch a 10%-alpha hole using
  // 'destination-out' compositing, then fully clear the canvas after 3.7s.
  private fadeOut() {
    let timeoutID: ReturnType<typeof setTimeout>
    const fadeStep = () => {
      // Abort the fade loop if drawing resumed or the context is gone.
      if (!this.ctx || this.painting ) { return }
      this.ctx.globalCompositeOperation = 'destination-out'
      this.ctx.fillStyle = "rgba(255, 255, 255, 0.1)"
      this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
      this.ctx.globalCompositeOperation = 'source-over'
      timeoutID = setTimeout(fadeStep,100)
    }
    this.clrTmID = setTimeout(() => {
      clearTimeout(timeoutID)
      this.ctx &&
      this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height)
    }, 3700)
    fadeStep()
  }

  // Attach the canvas to `parent`, grab the 2d context, and track resizes.
  mount(parent: HTMLElement) {
    parent.appendChild(this.canvas)
    this.ctx = this.canvas.getContext("2d")
    window.addEventListener("resize", this.resizeCanvas)
    this.resizeCanvas()
  }

  // Detach the canvas and stop listening for window resizes.
  remove() {
    if (this.canvas.parentNode){
      this.canvas.parentNode.removeChild(this.canvas)
    }
    window.removeEventListener("resize", this.resizeCanvas)
  }
}

View file

@ -7,8 +7,8 @@ import store from 'App/store';
import type { LocalStream } from './LocalStream';
import { update, getState } from '../../store';
import { iceServerConfigFromString } from 'App/utils'
import MStreamReader from '../messages/MStreamReader';;
import AnnotationCanvas from './AnnotationCanvas';
import MStreamReader from '../messages/MStreamReader';
import JSONRawMessageReader from '../messages/JSONRawMessageReader'
export enum CallingState {
@ -136,12 +136,14 @@ export default class AssistManager {
//socket.onAny((...args) => console.log(...args))
socket.on("connect", () => {
waitingForMessages = true
this.setStatus(ConnectionStatus.WaitingMessages)
this.setStatus(ConnectionStatus.WaitingMessages) // TODO: happens frequently on bad network
})
socket.on("disconnect", () => {
this.toggleRemoteControl(false)
update({ calling: CallingState.NoCall })
})
socket.on('messages', messages => {
//console.log(messages.filter(m => m._id === 41 || m._id === 44))
showDisconnectTimeout && clearTimeout(showDisconnectTimeout);
jmr.append(messages) // as RawMessage[]
@ -173,14 +175,15 @@ export default class AssistManager {
this.setStatus(ConnectionStatus.Disconnected)
}, 30000)
if (getState().remoteControl === RemoteControlStatus.Requesting ||
getState().remoteControl === RemoteControlStatus.Enabled) {
this.toggleRemoteControl(false)
if (getState().remoteControl === RemoteControlStatus.Requesting) {
this.toggleRemoteControl(false) // else its remaining
}
// Call State
if (getState().calling === CallingState.OnCall) {
update({ calling: CallingState.Reconnecting })
} else if (getState().calling === CallingState.Requesting){
update({ calling: CallingState.NoCall })
}
})
socket.on('error', e => {
@ -200,7 +203,7 @@ export default class AssistManager {
private onMouseMove = (e: MouseEvent): void => {
if (!this.socket) { return }
const data = this.md.getInternalCoordinates(e)
this.socket.emit("move", [ Math.round(data.x), Math.round(data.y) ])
this.socket.emit("move", [ data.x, data.y ])
}
private onWheel = (e: WheelEvent): void => {
@ -213,15 +216,23 @@ export default class AssistManager {
private onMouseClick = (e: MouseEvent): void => {
if (!this.socket) { return; }
const data = this.md.getInternalViewportCoordinates(e);
const data = this.md.getInternalViewportCoordinates(e)
// const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager
const el = this.md.getElementFromInternalPoint(data)
if (el instanceof HTMLElement) {
el.focus()
el.oninput = e => e.preventDefault();
el.onkeydown = e => e.preventDefault();
el.oninput = e => {
if (el instanceof HTMLTextAreaElement
|| el instanceof HTMLInputElement
) {
this.socket && this.socket.emit("input", el.value)
} else if (el.isContentEditable) {
this.socket && this.socket.emit("input", el.innerText)
}
}
//el.onkeydown = e => e.preventDefault()
}
this.socket.emit("click", [ Math.round(data.x), Math.round(data.y) ]);
this.socket.emit("click", [ data.x, data.y ]);
}
private toggleRemoteControl(newState: boolean){
@ -310,6 +321,8 @@ export default class AssistManager {
this.callConnection && this.callConnection.close()
update({ calling: CallingState.NoCall })
this.callArgs = null
this.annot?.remove()
this.annot = null
}
private initiateCallEnd = () => {
@ -355,6 +368,8 @@ export default class AssistManager {
}
}
private annot: AnnotationCanvas | null = null
private _call() {
if (![CallingState.NoCall, CallingState.Reconnecting].includes(getState().calling)) { return }
update({ calling: CallingState.Connecting })
@ -379,6 +394,34 @@ export default class AssistManager {
call.on('stream', stream => {
update({ calling: CallingState.OnCall })
this.callArgs && this.callArgs.onStream(stream)
if (!this.annot) {
const annot = this.annot = new AnnotationCanvas()
annot.mount(this.md.overlay)
annot.canvas.addEventListener("mousedown", e => {
if (!this.socket) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.start([ data.x, data.y ])
this.socket.emit("startAnnotation", [ data.x, data.y ])
})
annot.canvas.addEventListener("mouseleave", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mouseup", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mousemove", e => {
if (!this.socket || !annot.isPainting()) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.move([ data.x, data.y ])
this.socket.emit("moveAnnotation", [ data.x, data.y ])
})
}
});
//call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
@ -409,6 +452,10 @@ export default class AssistManager {
this.socket.close()
document.removeEventListener('visibilitychange', this.onVisChange)
}
if (this.annot) {
this.annot.remove()
this.annot = null
}
}
}

View file

@ -113,8 +113,15 @@ export default class DOMManager extends ListWalker<Message> {
logger.error("Node has no childNodes", this.nl[ parentID ]);
return;
}
if (this.nl[ id ] instanceof HTMLHtmlElement) {
// What if some exotic cases?
this.nl[ parentID ].replaceChild(this.nl[ id ], childNodes[childNodes.length-1])
return
}
this.nl[ parentID ]
.insertBefore(this.nl[ id ], childNodes[ index ]);
.insertBefore(this.nl[ id ], childNodes[ index ])
}
private applyMessage = (msg: Message): void => {
@ -257,14 +264,14 @@ export default class DOMManager extends ListWalker<Message> {
case "create_i_frame_document":
node = this.nl[ msg.frameID ];
// console.log('ifr', msg, node)
if (node instanceof HTMLIFrameElement) {
doc = node.contentDocument;
if (!doc) {
logger.warn("No iframe doc", msg, node, node.contentDocument);
return;
}
this.nl[ msg.id ] = doc.documentElement
this.nl[ msg.id ] = doc
return;
} else if (node instanceof Element) { // shadow DOM
try {

View file

@ -62,7 +62,7 @@
.color-white { color: $white }
.color-borderColor { color: $borderColor }
/* color */
/* hover color */
.hover-main:hover { color: $main }
.hover-gray-light-shade:hover { color: $gray-light-shade }
.hover-gray-lightest:hover { color: $gray-lightest }
@ -92,3 +92,33 @@
.hover-pink:hover { color: $pink }
.hover-white:hover { color: $white }
.hover-borderColor:hover { color: $borderColor }
.border-main { border-color: $main }
.border-gray-light-shade { border-color: $gray-light-shade }
.border-gray-lightest { border-color: $gray-lightest }
.border-gray-light { border-color: $gray-light }
.border-gray-medium { border-color: $gray-medium }
.border-gray-dark { border-color: $gray-dark }
.border-gray-darkest { border-color: $gray-darkest }
.border-teal { border-color: $teal }
.border-teal-dark { border-color: $teal-dark }
.border-teal-light { border-color: $teal-light }
.border-tealx { border-color: $tealx }
.border-tealx-light { border-color: $tealx-light }
.border-tealx-light-border { border-color: $tealx-light-border }
.border-orange { border-color: $orange }
.border-yellow { border-color: $yellow }
.border-yellow2 { border-color: $yellow2 }
.border-orange-dark { border-color: $orange-dark }
.border-green { border-color: $green }
.border-green2 { border-color: $green2 }
.border-green-dark { border-color: $green-dark }
.border-red { border-color: $red }
.border-red2 { border-color: $red2 }
.border-blue { border-color: $blue }
.border-blue2 { border-color: $blue2 }
.border-active-blue { border-color: $active-blue }
.border-active-blue-border { border-color: $active-blue-border }
.border-pink { border-color: $pink }
.border-white { border-color: $white }
.border-borderColor { border-color: $borderColor }

View file

@ -1,4 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-chevron-double-left" viewBox="0 0 16 16">
<svg xmlns="http://www.w3.org/2000/svg" fill="currentColor" class="bi bi-chevron-double-left" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M8.354 1.646a.5.5 0 0 1 0 .708L2.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
<path fill-rule="evenodd" d="M12.354 1.646a.5.5 0 0 1 0 .708L6.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
</svg>

Before

Width:  |  Height:  |  Size: 447 B

After

Width:  |  Height:  |  Size: 424 B

View file

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" class="bi bi-chevron-left" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z"/>
</svg>

After

Width:  |  Height:  |  Size: 246 B

View file

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" class="bi bi-chevron-right" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z"/>
</svg>

After

Width:  |  Height:  |  Size: 248 B

View file

@ -5,6 +5,7 @@ import Session from './session';
export const RESOLVED = "resolved";
export const UNRESOLVED = "unresolved";
export const IGNORED = "ignored";
export const BOOKMARK = "bookmark";
function getStck0InfoString(stack) {

View file

@ -8,6 +8,21 @@ export enum FilterCategory {
PERFORMANCE = "Performance",
};
export enum IssueType {
CLICK_RAGE = "click_rage",
DEAD_CLICK = "dead_click",
EXCESSIVE_SCROLLING = "excessive_scrolling",
BAD_REQUEST = "bad_request",
MISSING_RESOURCE = "missing_resource",
MEMORY = "memory",
CPU = "cpu",
SLOW_RESOURCE = "slow_resource",
SLOW_PAGE_LOAD = "slow_page_load",
CRASH = "crash",
CUSTOM = "custom",
JS_EXCEPTION = "js_exception",
}
export enum FilterType {
STRING = "STRING",
ISSUE = "ISSUE",

View file

@ -14,7 +14,10 @@ const getRedableName = ({ type, value, operator }) => {
break;
case "INPUT":
str = 'Entered';
break;
break;
case "CUSTOM":
str = 'Custom Event';
break;
}
return `${str} ${operator}`;
@ -52,7 +55,7 @@ export default Record({
},
fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => {
let _stages = stages.map((stage, index) => {
// stage.label = getRedableName(stage.type, stage.value);
stage.headerText = getRedableName(stage.type, stage.value);
stage.label = `Step ${index + 1}`;
return stage;
});
@ -73,7 +76,7 @@ export default Record({
...rest,
stages: _stages.length > 0 ? _stages.map((stage, index) => {
if (!stage) return;
// stage.label = getRedableName(stage);
stage.headerText = getRedableName(stage);
stage.label = `Step ${index + 1}`;
return stage;
}) : [],

View file

@ -232,4 +232,10 @@ export const isGreaterOrEqualVersion = (version, compareTo) => {
const [major, minor, patch] = version.split("-")[0].split('.');
const [majorC, minorC, patchC] = compareTo.split("-")[0].split('.');
return (major > majorC) || (major === majorC && minor > minorC) || (major === majorC && minor === minorC && patch >= patchC);
}
// Return the slice of `list` belonging to the given zero-based page number.
export const sliceListPerPage = (list, page, perPage = 10) => {
  const offset = page * perPage;
  return list.slice(offset, offset + perPage);
}

View file

@ -13,7 +13,7 @@ const oss = {
ORIGIN: () => 'window.location.origin',
API_EDP: () => 'window.location.origin + "/api"',
ASSETS_HOST: () => 'window.location.origin + "/assets"',
VERSION: '1.5.3',
VERSION: '1.5.4',
SOURCEMAP: true,
MINIO_ENDPOINT: process.env.MINIO_ENDPOINT,
MINIO_PORT: process.env.MINIO_PORT,
@ -21,7 +21,7 @@ const oss = {
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
ICE_SERVERS: process.env.ICE_SERVERS,
TRACKER_VERSION: '3.5.3' // trackerInfo.version,
TRACKER_VERSION: '3.5.4' // trackerInfo.version,
}
module.exports = {

View file

@ -12,7 +12,9 @@ ${ colors.map(color => `.fill-${ color } { fill: $${ color } }`).join('\n') }
/* color */
${ colors.map(color => `.color-${ color } { color: $${ color } }`).join('\n') }
/* color */
/* hover color */
${ colors.map(color => `.hover-${ color }:hover { color: $${ color } }`).join('\n') }
${ colors.map(color => `.border-${ color } { border-color: $${ color } }`).join('\n') }
`)

View file

@ -0,0 +1,91 @@
\set ON_ERROR_STOP true
SET client_min_messages TO NOTICE;
BEGIN;

-- Bump the schema version reported by the database for this release.
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;

-- to detect duplicate users and delete them if possible
-- (emails are lowercased further below, so rows differing only in case
-- would collide; duplicates are considered deletable when they are already
-- soft-deleted or have no issued JWT)
DO
$$
    DECLARE
        duplicate RECORD;
    BEGIN
        -- Does any user share a lowercased email with another user?
        IF EXISTS(SELECT user_id
                  FROM users
                  WHERE lower(email) =
                        (SELECT LOWER(email)
                         FROM users AS su
                         WHERE LOWER(su.email) = LOWER(users.email)
                           AND su.user_id != users.user_id
                         LIMIT 1)
                  ORDER BY LOWER(email)) THEN
            raise notice 'duplicate users detected';
            -- First pass: delete the duplicates that are safe to remove.
            FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat
                             FROM users
                             WHERE lower(email) =
                                   (SELECT LOWER(email)
                                    FROM users AS su
                                    WHERE LOWER(su.email) = LOWER(users.email)
                                      AND su.user_id != users.user_id
                                    LIMIT 1)
                             ORDER BY LOWER(email)
                LOOP
                    IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
                        raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
                        DELETE FROM users WHERE user_id = duplicate.user_id;
                    END IF;
                END LOOP;
            -- Second pass: if duplicates remain after the cleanup, list them
            -- and abort the migration so an operator can resolve them manually.
            IF EXISTS(SELECT user_id
                      FROM users
                      WHERE lower(email) =
                            (SELECT LOWER(email)
                             FROM users AS su
                             WHERE LOWER(su.email) = LOWER(users.email)
                               AND su.user_id != users.user_id
                             LIMIT 1)
                      ORDER BY LOWER(email)) THEN
                raise notice 'remaining duplicates, please fix (delete) before finishing update';
                FOR duplicate IN SELECT user_id, email
                                 FROM users
                                 WHERE lower(email) =
                                       (SELECT LOWER(email)
                                        FROM users AS su
                                        WHERE LOWER(su.email) = LOWER(users.email)
                                          AND su.user_id != users.user_id
                                        LIMIT 1)
                                 ORDER BY LOWER(email)
                    LOOP
                        raise notice 'user: % %',duplicate.user_id,duplicate.email;
                    END LOOP;
                -- 42710 = duplicate_object: fail the transaction.
                RAISE 'Duplicate users' USING ERRCODE = '42710';
            END IF;
        END IF;
    END;
$$
LANGUAGE plpgsql;

-- Normalize all emails to lower case (safe now that duplicates are gone).
UPDATE users
SET email=LOWER(email);

-- The single broad trigram index is replaced by per-type partial indexes below.
DROP INDEX IF EXISTS autocomplete_value_gin_idx;
COMMIT;

-- CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
-- hence these statements come after COMMIT.
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';

View file

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3'
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@ -898,7 +898,23 @@ $$
CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
CREATE TYPE job_action AS ENUM ('delete_user_data');

View file

@ -40,3 +40,7 @@ dependencies:
repository: file://charts/redis
version: 12.10.1
condition: redis.enabled
- name: minio
repository: file://charts/minio
version: 3.7.14
condition: minio.enabled

View file

@ -99,6 +99,9 @@ redis:
cpu: 100m
memory: 128Mi
minio:
enabled: true
postgresql:
# postgresqlPassword: asayerPostgres
fullnameOverride: postgresql

View file

@ -15,7 +15,7 @@ fatal()
exit 1
}
version="v1.5.3"
version="v1.5.4"
usr=`whoami`
# Installing k3s

View file

@ -22,4 +22,4 @@ version: 0.1.0
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
# Ref: https://github.com/helm/helm/issues/7858#issuecomment-608114589
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -17,13 +17,13 @@ function migrate() {
IFS=',' read -r -a migration_versions <<< "$1"
for version in ${migration_versions[*]}; do
echo "Migrating postgresql version $version"
psql -f ${pgdir}/${version}/${version}.sql
psql -f ${pgdir}/${version}/${version}.sql 2>&1
done
}
function init() {
echo "Initializing postgresql"
psql -f ${pgdir}/init_schema.sql
psql -f ${pgdir}/init_schema.sql 2>&1
}
# /bin/bash postgresql.sh migrate $migration_versions

View file

@ -1,4 +1,4 @@
fromVersion: "v1.5.3"
fromVersion: "v1.5.4"
# Databases specific variables
postgresql: &postgres
# For generating passwords
@ -100,7 +100,7 @@ utilities:
env:
debug: 0
uws: false
cluster: false
redis: false
# If you want to override something
# chartname:

View file

@ -9,8 +9,15 @@
<!--CSS -->
<!-- <link href="css/styles.css" rel="stylesheet"> -->
<style>
body {
margin: 0;
padding: 0;
}
.text-uppercase {
text-transform: uppercase;
}
.connecting-message {
margin-top: 50%;
/* margin-top: 50%; */
font-size: 20px;
color: #aaa;
text-align: center;
@ -19,31 +26,90 @@
}
.status-connecting .connecting-message {
display: block;
/* display: block; */
}
.status-connecting .card {
display: none;
/* display: none; */
}
.card{
min-width: 324px;
width: 350px;
max-width: 800px;
/*min-height: 220px;*/
max-height: 450px;
/*resize: both;
overflow: auto;*/
font: 14px 'Roboto', sans-serif;
/* min-width: 324px; */
width: 300px;
/* max-width: 800px; */
/* border: solid thin #ccc; */
/* box-shadow: 0 0 10px #aaa; */
border: solid 4px rgba(0, 0, 0, 0.2);
border-radius: 3px;
}
.card-footers {
display: flex;
border-bottom: solid thin #CCC;
padding: 5px 5px;
justify-content: space-between;
}
.card-footers .assist-controls {
display: flex;
align-items: center;
}
.btn-danger {
background-color: #CC0000 !important;
color: white;
}
.btn-danger:hover {
background-color: #FF0000 !important;
color: white;
}
.btn {
padding: 5px 8px;
font-size: 14px;
border-radius: 3px;
background-color: transparent;
border: none;
cursor: pointer;
display: flex;
align-items: center;
}
.btn span {
margin-left: 10px;
}
.btn:hover {
filter: brightness(0.9);
}
.card .card-header{
cursor: move;
padding: 14px 18px;
display: flex;
justify-content: space-between;
border-bottom: solid thin #ccc;
}
#agent-name, #duration{
cursor:default;
}
#video-container {
background-color: rgb(90, 90, 90);
position: relative;
overflow: hidden;
/* width: 300px; */
}
#video-container video {
width: 100% !important;
height: auto;
object-fit: cover;
}
#local-stream, #remote-stream {
display:none;
/* display:none; */ /* TODO uncomment this line */
}
#video-container.remote #remote-stream {
display: block;
@ -57,20 +123,30 @@
#local-stream{
width: 35%;
/* top: 50%; */
/* left: 70%; */
position: absolute;
z-index: 99;
bottom: 5px;
right: 5px;
border: thin solid rgba(255,255,255, .3);
overflow: hidden;
}
#audio-btn {
margin-right: 10px;
}
#audio-btn .bi-mic {
fill: #CC0000;
}
#audio-btn .bi-mic-mute {
display:none;
}
#audio-btn:after {
text-transform: capitalize;
content: 'Mute'
/* text-transform: capitalize; */
color: #CC0000;
content: 'Mute';
padding-left: 5px;
}
#audio-btn.muted .bi-mic-mute {
display: inline-block;
@ -79,19 +155,26 @@
display:none;
}
#audio-btn.muted:after {
content: 'Unmute'
content: 'Unmute';
padding-left: 5px;
}
#video-btn .bi-camera-video {
fill: #CC0000;
}
#video-btn .bi-camera-video-off {
display:none;
}
#video-btn:after {
text-transform: capitalize;
content: 'Stop Video'
/* text-transform: capitalize; */
color: #CC0000;
content: 'Stop Video';
padding-left: 5px;
}
#video-btn.off:after {
content: 'Start Video'
content: 'Start Video';
padding-left: 5px;
}
#video-btn.off .bi-camera-video-off {
display: inline-block;
@ -100,16 +183,201 @@
display:none;
}
/* CHAT */
#chat-card {
display: flex;
flex-direction: column;
font-size: 14px;
background-color: white;
}
#chat-card .chat-messages { display: none; }
#chat-card .chat-input { display: none; }
#chat-card .chat-header .arrow-state { transform: rotate(180deg); }
#chat-card.active .chat-messages { display: flex; }
#chat-card.active .chat-input { display: flex; }
#chat-card.active .chat-header .arrow-state { transform: rotate(0deg); }
#chat-card .chat-header {
border-bottom: solid thin #ccc;
padding: 8px 16px;
display: flex;
justify-content: space-between;
cursor: pointer;
}
#chat-card .chat-header .chat-title {
display: flex;
align-items: center;
}
#chat-card .chat-header .chat-title span {
margin-left: 6px;
}
#chat-card .chat-messages {
padding: 8px 16px;
overflow-y: auto;
height: 250px;
overflow-y: auto;
flex-direction: column;
justify-content: flex-end;
}
#chat-card .message-text {
padding: 8px 16px;
border-radius: 20px;
color: #666666;
margin-bottom: 2px;
}
#chat-card .message .message-text {
/* max-width: 70%; */
width: fit-content;
}
#chat-card .message {
margin-bottom: 15px;
}
#chat-card .chat-messages .message.left .message-text {
text-align: left;
background: #D7E2E2;
border-radius: 0px 30px 30px 30px;
}
#chat-card .message .message-user {
font-size: 12px;
font-weight: bold;
color: #999999;
}
#chat-card .message .message-time {
font-size: 12px;
color: #999999;
margin-left: 4px;
}
#chat-card .chat-messages .message.right {
margin-left: auto;
text-align: right;
}
#chat-card .chat-messages .message.right .message-text {
background: #E4E4E4;
box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.15);
border-radius: 30px 30px 0px 30px;
}
#chat-card .chat-input {
margin: 10px;
border-radius: 3px;
box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.15);
background-color: #DDDDDD;
position: relative;
}
#chat-card .chat-input .input {
width: 100%;
border: none;
border-radius: 0px;
padding: 8px 16px;
font-size: 16px;
color: #333;
background-color: transparent;
}
.send-btn {
width: 26px;
height: 26px;
background-color: #AAA;
position: absolute;
right: 5px;
top: 0;
bottom: 0;
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
margin: auto;
cursor: pointer;
}
.send-btn:hover {
background-color: #999;
}
.send-btn svg {
fill: #DDDDDD;
}
.confirm-window .title {
margin-bottom: 10px;
}
.confirm-window {
font: 14px 'Roboto', sans-serif;
padding: 20px;
background-color: #F3F3F3;
border-radius: 3px;
/* position: absolute; */
width: fit-content;
color: #666666;
display: none;
}
.confirm-window .actions {
background-color: white;
padding: 10px;
display: flex;
box-shadow: 0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1);
border-radius: 6px;
}
.btn-lg {
font-size: 14px;
padding: 10px 14px;
}
.btn-success {
background: rgba(0, 167, 47, 1);
color: white;
}
/* .btn-error:hover,
.btn-success:hover {
filter: brightness(0.9);
} */
.btn-error {
background: #FFE9E9;
/* border-color: #d43f3a; */
color: #CC0000;
}
</style>
<link href="css/bootstrap.min.css" rel="stylesheet">
</head>
<body>
<div id="remote-control-confirm" class="confirm-window">
<div class="title">The agent is requesting remote control</div>
<div class="actions">
<button class="text-uppercase btn btn-lg btn-success" style="margin-right: 10px">Grant remote access</button>
<button class="text-uppercase btn btn-lg btn-error">Reject</button>
</div>
</div>
<div id="call-confirm" class="confirm-window">
<div class="title">Answer the call so the agent can assist.</div>
<div class="actions">
<button class="text-uppercase btn btn-lg btn-success" style="margin-right: 10px">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-telephone" viewBox="0 0 16 16">
<path d="M3.654 1.328a.678.678 0 0 0-1.015-.063L1.605 2.3c-.483.484-.661 1.169-.45 1.77a17.568 17.568 0 0 0 4.168 6.608 17.569 17.569 0 0 0 6.608 4.168c.601.211 1.286.033 1.77-.45l1.034-1.034a.678.678 0 0 0-.063-1.015l-2.307-1.794a.678.678 0 0 0-.58-.122l-2.19.547a1.745 1.745 0 0 1-1.657-.459L5.482 8.062a1.745 1.745 0 0 1-.46-1.657l.548-2.19a.678.678 0 0 0-.122-.58L3.654 1.328zM1.884.511a1.745 1.745 0 0 1 2.612.163L6.29 2.98c.329.423.445.974.315 1.494l-.547 2.19a.678.678 0 0 0 .178.643l2.457 2.457a.678.678 0 0 0 .644.178l2.189-.547a1.745 1.745 0 0 1 1.494.315l2.306 1.794c.829.645.905 1.87.163 2.611l-1.034 1.034c-.74.74-1.846 1.065-2.877.702a18.634 18.634 0 0 1-7.01-4.42 18.634 18.634 0 0 1-4.42-7.009c-.362-1.03-.037-2.137.703-2.877L1.885.511z"/>
</svg>
<span>Answer</span>
</button>
<button class="text-uppercase btn btn-lg btn-error">Reject</button>
</div>
</div>
<section id="or-assist" class="status-connecting">
<div class="connecting-message"> Connecting... </div>
<div class="card border-dark shadow">
<div class="card shadow">
<div class="drag-area card-header d-flex justify-content-between">
<div class="user-info">
<span>Call with</span>
@ -123,44 +391,98 @@
</div>
<div id="video-container" class="card-body bg-dark p-0 d-flex align-items-center position-relative">
<div id="local-stream" class="ratio ratio-4x3 rounded m-0 p-0 shadow">
<p class="text-white m-auto text-center">Starting video...</p>
<!-- <p class="text-white m-auto text-center">Starting video...</p> -->
<video id="video-local" autoplay muted></video>
</div>
<div id="remote-stream" class="ratio ratio-4x3 m-0 p-0">
<p id="remote-stream-placeholder" class="text-white m-auto text-center">Starting video...</p>
<!-- <p id="remote-stream-placeholder" class="text-white m-auto text-center">Starting video...</p> -->
<video id="video-remote" autoplay></video>
</div>
</div>
<div class="card-footer bg-transparent d-flex justify-content-between">
<div class="card-footers">
<div class="assist-controls">
<a href="#" id="audio-btn" class="btn btn-light btn-sm text-uppercase me-2"><i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic" viewBox="0 0 16 16">
<!-- Add class .muted to #audio-btn when user mutes audio -->
<button
href="#"
id="audio-btn"
class="btn btn-light btn-sm text-uppercase me-2"
>
<i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" class="bi bi-mic" viewBox="0 0 16 16">
<path d="M3.5 6.5A.5.5 0 0 1 4 7v1a4 4 0 0 0 8 0V7a.5.5 0 0 1 1 0v1a5 5 0 0 1-4.5 4.975V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 .5-.5z"/>
<path d="M10 8a2 2 0 1 1-4 0V3a2 2 0 1 1 4 0v5zM8 0a3 3 0 0 0-3 3v5a3 3 0 0 0 6 0V3a3 3 0 0 0-3-3z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic-mute" viewBox="0 0 16 16">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" class="bi bi-mic-mute" viewBox="0 0 16 16">
<path d="M13 8c0 .564-.094 1.107-.266 1.613l-.814-.814A4.02 4.02 0 0 0 12 8V7a.5.5 0 0 1 1 0v1zm-5 4c.818 0 1.578-.245 2.212-.667l.718.719a4.973 4.973 0 0 1-2.43.923V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 1 0v1a4 4 0 0 0 4 4zm3-9v4.879l-1-1V3a2 2 0 0 0-3.997-.118l-.845-.845A3.001 3.001 0 0 1 11 3z"/>
<path d="m9.486 10.607-.748-.748A2 2 0 0 1 6 8v-.878l-1-1V8a3 3 0 0 0 4.486 2.607zm-7.84-9.253 12 12 .708-.708-12-12-.708.708z"/>
</svg>
</i></a>
<!-- Add class .mute to #audio-btn when user mutes audio -->
<a href="#" id="video-btn" class="off btn btn-light btn-sm text-uppercase ms-2"><i >
</i>
</button>
<!--Add class .off to #video-btn when user stops video -->
<button
href="#"
id="video-btn"
class="btn btn-light btn-sm text-uppercase ms-2"
>
<i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M0 5a2 2 0 0 1 2-2h7.5a2 2 0 0 1 1.983 1.738l3.11-1.382A1 1 0 0 1 16 4.269v7.462a1 1 0 0 1-1.406.913l-3.111-1.382A2 2 0 0 1 9.5 13H2a2 2 0 0 1-2-2V5zm11.5 5.175 3.5 1.556V4.269l-3.5 1.556v4.35zM2 4a1 1 0 0 0-1 1v6a1 1 0 0 0 1 1h7.5a1 1 0 0 0 1-1V5a1 1 0 0 0-1-1H2z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video-off" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M10.961 12.365a1.99 1.99 0 0 0 .522-1.103l3.11 1.382A1 1 0 0 0 16 11.731V4.269a1 1 0 0 0-1.406-.913l-3.111 1.382A2 2 0 0 0 9.5 3H4.272l.714 1H9.5a1 1 0 0 1 1 1v6a1 1 0 0 1-.144.518l.605.847zM1.428 4.18A.999.999 0 0 0 1 5v6a1 1 0 0 0 1 1h5.014l.714 1H2a2 2 0 0 1-2-2V5c0-.675.334-1.272.847-1.634l.58.814zM15 11.73l-3.5-1.555v-4.35L15 4.269v7.462zm-4.407 3.56-10-14 .814-.58 10 14-.814.58z"/>
</svg>
</i>
</button>
</div>
<button id="end-call-btn" href="#" class="btn btn-danger btn-sm text-uppercase" style="margin-right: 8px;">End</button>
</div>
</i></a>
<!--Add class .off to #video-btn when user stops video -->
<!-- CHAT - add .active class to show the messages and input -->
<div id="chat-card" class="active">
<div class="chat-header">
<div class="chat-title">
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" fill="currentColor" class="bi bi-chat" viewBox="0 0 16 16">
<path d="M2.678 11.894a1 1 0 0 1 .287.801 10.97 10.97 0 0 1-.398 2c1.395-.323 2.247-.697 2.634-.893a1 1 0 0 1 .71-.074A8.06 8.06 0 0 0 8 14c3.996 0 7-2.807 7-6 0-3.192-3.004-6-7-6S1 4.808 1 8c0 1.468.617 2.83 1.678 3.894zm-.493 3.905a21.682 21.682 0 0 1-.713.129c-.2.032-.352-.176-.273-.362a9.68 9.68 0 0 0 .244-.637l.003-.01c.248-.72.45-1.548.524-2.319C.743 11.37 0 9.76 0 8c0-3.866 3.582-7 8-7s8 3.134 8 7-3.582 7-8 7a9.06 9.06 0 0 1-2.347-.306c-.52.263-1.639.742-3.468 1.105z"/>
</svg>
<span>Chat</span>
</div>
<div class="assist-end">
<a id="end-call-btn" href="#" class="btn btn-danger btn-sm text-uppercase">End</a>
<div>
<svg xmlns="http://www.w3.org/2000/svg" width="18" height="18" class="bi bi-chevron-up arrow-state" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M7.646 4.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1-.708.708L8 5.707l-5.646 5.647a.5.5 0 0 1-.708-.708l6-6z"/>
</svg>
</div>
</div>
<div class="chat-messages">
<div class="message left">
<div class="message-text"> Hey, did you get the key? </div>
<div>
<span class="message-user">Username</span>
<span class="message-time"> 00:00 </span>
</div>
</div>
<div class="message right">
<div class="message-text">
Oui, merci!
</div>
<div>
<span class="message-user">Username</span>
<span class="message-time">00:00</span>
</div>
</div>
</div>
<div class="chat-input">
<input type="text" class="input" placeholder="Type a message...">
<div class="send-btn">
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" class="bi bi-arrow-right-short" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z"/>
</svg>
</div>
</div>
</div>
</div>
</section>

View file

@ -1,16 +1,15 @@
{
"name": "@openreplay/tracker-assist",
"version": "3.5.3",
"version": "3.5.7",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@openreplay/tracker-assist",
"version": "3.5.3",
"version": "3.5.7",
"license": "MIT",
"dependencies": {
"csstype": "^3.0.10",
"npm-dragndrop": "^1.2.0",
"peerjs": "^1.3.2",
"socket.io-client": "^4.4.1"
},
@ -21,12 +20,12 @@
"typescript": "^4.6.0-dev.20211126"
},
"peerDependencies": {
"@openreplay/tracker": "^3.5.0"
"@openreplay/tracker": "^3.5.3"
}
},
"../tracker": {
"name": "@openreplay/tracker",
"version": "3.5.2",
"version": "3.5.4",
"dev": true,
"license": "MIT",
"dependencies": {
@ -3338,11 +3337,6 @@
"node": ">=0.10.0"
}
},
"node_modules/npm-dragndrop": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/npm-dragndrop/-/npm-dragndrop-1.2.0.tgz",
"integrity": "sha1-bgUkAP7Yay8eP0csU4EPkjcRu7U="
},
"node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
@ -6487,11 +6481,6 @@
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"dev": true
},
"npm-dragndrop": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/npm-dragndrop/-/npm-dragndrop-1.2.0.tgz",
"integrity": "sha1-bgUkAP7Yay8eP0csU4EPkjcRu7U="
},
"p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-assist",
"description": "Tracker plugin for screen assistance through the WebRTC",
"version": "3.5.5",
"version": "3.5.7",
"keywords": [
"WebRTC",
"assistance",
@ -25,12 +25,11 @@
},
"dependencies": {
"csstype": "^3.0.10",
"npm-dragndrop": "^1.2.0",
"peerjs": "^1.3.2",
"socket.io-client": "^4.4.1"
},
"peerDependencies": {
"@openreplay/tracker": "^3.5.0"
"@openreplay/tracker": "^3.5.3"
},
"devDependencies": {
"@openreplay/tracker": "file:../tracker",

View file

@ -0,0 +1,81 @@
// Full-viewport overlay canvas used during assisted sessions: the agent's
// pointer strokes are drawn on top of the page in red and fade away after
// the stroke ends. Mouse events pass through to the page underneath.
export default class AnnotationCanvas {
  private canvas: HTMLCanvasElement
  // 2D context; null until mount() attaches the canvas to the document.
  private ctx: CanvasRenderingContext2D | null = null
  // True while a stroke is in progress (between start() and stop()).
  private painting: boolean = false

  constructor() {
    this.canvas = document.createElement('canvas')
    Object.assign(this.canvas.style, {
      position: "fixed",
      left: 0,
      top: 0,
      pointerEvents: "none", // never intercept the user's clicks
      zIndex: 2147483647 - 2, // near max z-index, below the call/confirm UI
    })
  }

  // Keep the canvas bitmap in sync with the viewport dimensions.
  private resizeCanvas = () => {
    this.canvas.width = window.innerWidth
    this.canvas.height = window.innerHeight
  }

  // Last stroke point, in viewport coordinates; segments are drawn from it.
  private lastPosition: [number, number] = [0,0]

  // Begin a new stroke at point p. Cancels a pending full-clear timer so an
  // in-progress fade-out does not wipe the new stroke.
  start = (p: [number, number]) => {
    this.painting = true
    this.clrTmID && clearTimeout(this.clrTmID)
    this.lastPosition = p
  }

  // End the current stroke (no-op when idle) and start fading it out.
  stop = () => {
    if (!this.painting) { return }
    this.painting = false
    this.fadeOut()
  }

  // Extend the current stroke with a thick red round-capped segment to p.
  move = (p: [number, number]) =>{
    if (!this.ctx || !this.painting) { return }
    this.ctx.globalAlpha = 1.0 // reset any alpha left over from fading
    this.ctx.beginPath()
    this.ctx.moveTo(this.lastPosition[0], this.lastPosition[1])
    this.ctx.lineTo(p[0], p[1])
    this.ctx.lineWidth = 8
    this.ctx.lineCap = "round"
    this.ctx.lineJoin = "round"
    this.ctx.strokeStyle = "red"
    this.ctx.stroke()
    this.lastPosition = p
  }

  // Timer that performs the final full clear of the canvas after a fade.
  clrTmID: ReturnType<typeof setTimeout> | null = null

  // Gradually erase the drawing: every 100ms punch a 10%-opacity
  // destination-out layer over the whole canvas; after 4s stop the loop and
  // wipe whatever remains. A new start() aborts both (via the `painting`
  // guard in fadeStep and the clearTimeout in start()).
  private fadeOut() {
    let timeoutID: ReturnType<typeof setTimeout>
    const fadeStep = () => {
      // Stop the chain when a new stroke begins or the canvas is unmounted.
      if (!this.ctx || this.painting ) { return }
      this.ctx.globalCompositeOperation = 'destination-out'
      this.ctx.fillStyle = "rgba(255, 255, 255, 0.1)"
      this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
      this.ctx.globalCompositeOperation = 'source-over' // restore default
      timeoutID = setTimeout(fadeStep,100)
    }
    this.clrTmID = setTimeout(() => {
      clearTimeout(timeoutID)
      this.ctx &&
      this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height)
    }, 4000)
    fadeStep()
  }

  // Attach the canvas to <body>, acquire the 2D context and size it to the
  // viewport, tracking future resizes.
  mount() {
    document.body.appendChild(this.canvas)
    this.ctx = this.canvas.getContext("2d")
    window.addEventListener("resize", this.resizeCanvas)
    this.resizeCanvas()
  }

  // Detach the canvas (if still attached) and stop listening for resizes.
  remove() {
    if (this.canvas.parentNode){
      this.canvas.parentNode.removeChild(this.canvas)
    }
    window.removeEventListener("resize", this.resizeCanvas)
  }
}

View file

@ -5,8 +5,9 @@ import type { Properties } from 'csstype';
import { App } from '@openreplay/tracker';
import RequestLocalStream from './LocalStream.js';
import Mouse from './Mouse.js';
import RemoteControl from './RemoteControl.js';
import CallWindow from './CallWindow.js';
import AnnotationCanvas from './AnnotationCanvas.js';
import ConfirmWindow, { callConfirmDefault, controlConfirmDefault } from './ConfirmWindow.js';
import type { Options as ConfirmOptions } from './ConfirmWindow.js';
@ -14,12 +15,12 @@ import type { Options as ConfirmOptions } from './ConfirmWindow.js';
//@ts-ignore peerjs hack for webpack5 (?!) TODO: ES/node modules;
Peer = Peer.default || Peer;
type BehinEndCallback = () => ((()=>{}) | void)
type StartEndCallback = () => ((()=>{}) | void)
export interface Options {
onAgentConnect: BehinEndCallback,
onCallStart: BehinEndCallback,
onRemoteControlStart: BehinEndCallback,
onAgentConnect: StartEndCallback,
onCallStart: StartEndCallback,
onRemoteControlStart: StartEndCallback,
session_calling_peer_key: string,
session_control_peer_key: string,
callConfirm: ConfirmOptions,
@ -39,8 +40,11 @@ enum CallingState {
};
// TODO typing????
type OptionalCallback = (()=>{}) | void
type Agent = {
onDisconnect: ((()=>{}) | void), // TODO: better types here
onDisconnect?: OptionalCallback,
onControlReleased?: OptionalCallback,
name?: string
//
}
@ -139,6 +143,34 @@ export default class Assist {
})
socket.onAny((...args) => app.debug.log("Socket:", ...args))
const remoteControl = new RemoteControl(
this.options,
id => {
this.agents[id].onControlReleased = this.options.onRemoteControlStart()
this.emit("control_granted", id)
},
id => {
const cb = this.agents[id].onControlReleased
delete this.agents[id].onControlReleased
typeof cb === "function" && cb()
this.emit("control_rejected", id)
},
)
// TODO: check incoming args
socket.on("request_control", remoteControl.requestControl)
socket.on("release_control", remoteControl.releaseControl)
socket.on("scroll", remoteControl.scroll)
socket.on("click", remoteControl.click)
socket.on("move", remoteControl.move)
socket.on("input", remoteControl.input)
let annot: AnnotationCanvas | null = null
socket.on("moveAnnotation", (_, p) => annot && annot.move(p)) // TODO: restrict by id
socket.on("startAnnotation", (_, p) => annot && annot.start(p))
socket.on("stopAnnotation", () => annot && annot.stop())
socket.on("NEW_AGENT", (id: string, info) => {
this.agents[id] = {
onDisconnect: this.options.onAgentConnect && this.options.onAgentConnect(),
@ -148,7 +180,7 @@ export default class Assist {
this.app.stop();
this.app.start().then(() => { this.assistDemandedRestart = false })
})
socket.on("AGENTS_CONNECTED", (ids) => {
socket.on("AGENTS_CONNECTED", (ids: string[]) => {
ids.forEach(id =>{
this.agents[id] = {
onDisconnect: this.options.onAgentConnect && this.options.onAgentConnect(),
@ -157,75 +189,24 @@ export default class Assist {
this.assistDemandedRestart = true
this.app.stop();
this.app.start().then(() => { this.assistDemandedRestart = false })
const storedControllingAgent = sessionStorage.getItem(this.options.session_control_peer_key)
if (storedControllingAgent !== null && ids.includes(storedControllingAgent)) {
grantControl(storedControllingAgent)
socket.emit("control_granted", storedControllingAgent)
} else {
sessionStorage.removeItem(this.options.session_control_peer_key)
}
remoteControl.reconnect(ids)
})
let confirmRC: ConfirmWindow | null = null
const mouse = new Mouse() // TODO: lazy init
let controllingAgent: string | null = null
const requestControl = (id: string) => {
if (controllingAgent !== null) {
socket.emit("control_rejected", id)
return
}
controllingAgent = id // TODO: more explicit pending state
confirmRC = new ConfirmWindow(controlConfirmDefault(this.options.controlConfirm))
confirmRC.mount().then(allowed => {
if (allowed) {
grantControl(id)
socket.emit("control_granted", id)
} else {
releaseControl()
socket.emit("control_rejected", id)
}
}).catch()
}
let onRemoteControlStop: (()=>void) | null = null
const grantControl = (id: string) => {
controllingAgent = id
mouse.mount()
onRemoteControlStop = this.options.onRemoteControlStart() || null
sessionStorage.setItem(this.options.session_control_peer_key, id)
}
const releaseControl = () => {
typeof onRemoteControlStop === 'function' && onRemoteControlStop()
onRemoteControlStop = null
confirmRC?.remove()
mouse.remove()
controllingAgent = null
sessionStorage.removeItem(this.options.session_control_peer_key)
}
socket.on("request_control", requestControl)
socket.on("release_control", (id: string) => {
if (controllingAgent !== id) { return }
releaseControl()
})
socket.on("scroll", (id, d) => { id === controllingAgent && mouse.scroll(d) })
socket.on("click", (id, xy) => { id === controllingAgent && mouse.click(xy) })
socket.on("move", (id, xy) => { id === controllingAgent && mouse.move(xy) })
let confirmCall:ConfirmWindow | null = null
socket.on("AGENT_DISCONNECTED", (id) => {
// @ts-ignore (wtf, typescript?!)
this.agents[id] && this.agents[id].onDisconnect != null && this.agents[id].onDisconnect()
delete this.agents[id]
controllingAgent === id && releaseControl()
remoteControl.releaseControl(id)
// close the call also
if (callingAgent === id) {
confirmCall?.remove()
this.onRemoteCallEnd()
}
// @ts-ignore (wtf, typescript?!)
this.agents[id] && this.agents[id].onDisconnect != null && this.agents[id].onDisconnect()
delete this.agents[id]
})
socket.on("NO_AGENT", () => {
this.agents = {}
@ -281,11 +262,20 @@ export default class Assist {
style: this.options.confirmStyle,
}))
confirmAnswer = confirmCall.mount()
this.playNotificationSound()
this.onRemoteCallEnd = () => { // if call cancelled by a caller before confirmation
app.debug.log("Received call_end during confirm window opened")
confirmCall?.remove()
setCallingState(CallingState.False)
call.close()
}
setTimeout(() => {
if (this.callingState !== CallingState.Requesting) { return }
call.close()
confirmCall?.remove()
this.notifyCallEnd()
setCallingState(CallingState.False)
}, 30000)
}
confirmAnswer.then(agreed => {
@ -296,13 +286,18 @@ export default class Assist {
return
}
let callUI = new CallWindow()
const callUI = new CallWindow()
annot = new AnnotationCanvas()
annot.mount()
callUI.setAssistentName(agentName)
const onCallEnd = this.options.onCallStart()
const handleCallEnd = () => {
app.debug.log("Handle Call End")
call.close()
callUI.remove()
annot && annot.remove()
annot = null
setCallingState(CallingState.False)
onCallEnd && onCallEnd()
}
@ -350,6 +345,16 @@ export default class Assist {
});
}
private playNotificationSound() {
if ('Audio' in window) {
new Audio("https://static.openreplay.com/tracker-assist/notification.mp3")
.play()
.catch(e => {
this.app.debug.warn(e)
})
}
}
private clean() {
if (this.peer) {
this.peer.destroy()

View file

@ -1,4 +1,5 @@
import type { LocalStream } from './LocalStream.js';
import attachDND from './dnd';
const SS_START_TS_KEY = "__openreplay_assist_call_start_ts"
@ -18,20 +19,21 @@ export default class CallWindow {
private load: Promise<void>
constructor() {
const iframe = this.iframe = document.createElement('iframe');
const iframe = this.iframe = document.createElement('iframe')
Object.assign(iframe.style, {
position: "fixed",
zIndex: 2147483647 - 1,
//borderRadius: ".25em .25em .4em .4em",
//border: "4px rgba(0, 0, 0, .7)",
border: "none",
bottom: "10px",
right: "10px",
background: "white",
height: "200px",
width: "200px",
});
document.body.appendChild(iframe);
})
// TODO: find the best attribute name for the ignoring iframes
iframe.setAttribute("data-openreplay-obscured", "")
iframe.setAttribute("data-openreplay-hidden", "")
iframe.setAttribute("data-openreplay-ignore", "")
document.body.appendChild(iframe)
const doc = iframe.contentDocument;
if (!doc) {
@ -91,22 +93,10 @@ export default class CallWindow {
}, 500);
}
// TODO: better D'n'D
// mb set cursor:move here?
doc.body.setAttribute("draggable", "true");
doc.body.ondragstart = (e) => {
if (!e.dataTransfer || !e.target) { return; }
//@ts-ignore
if (!e.target.classList || !e.target.classList.contains("drag-area")) { return; }
e.dataTransfer.setDragImage(doc.body, e.clientX, e.clientY);
};
doc.body.ondragend = e => {
Object.assign(iframe.style, {
left: `${e.clientX}px`, // TODO: fix the case when ecoordinates are inside the iframe
top: `${e.clientY}px`,
bottom: 'auto',
right: 'auto',
})
const dragArea = doc.querySelector(".drag-area")
if (dragArea) {
// TODO: save coordinates on the new page
attachDND(iframe, dragArea, doc.documentElement)
}
});

View file

@ -2,75 +2,94 @@ import type { Properties } from 'csstype';
import { declineCall, acceptCall, cross, remoteControl } from './icons.js'
type ButtonOptions = HTMLButtonElement | string | {
innerHTML: string,
style?: Properties,
}
const TEXT_GRANT_REMORTE_ACCESS = "Grant Remote Access";
const TEXT_REJECT = "Reject";
const TEXT_ANSWER_CALL = `${acceptCall} &#xa0 Answer`;
type ButtonOptions =
| HTMLButtonElement
| string
| {
innerHTML: string;
style?: Properties;
};
// TODO: common strategy for InputOptions/defaultOptions merging
interface ConfirmWindowOptions {
text: string,
style?: Properties,
confirmBtn: ButtonOptions,
declineBtn: ButtonOptions,
text: string;
style?: Properties;
confirmBtn: ButtonOptions;
declineBtn: ButtonOptions;
}
export type Options = string | Partial<ConfirmWindowOptions>
export type Options = string | Partial<ConfirmWindowOptions>;
function confirmDefault(
opts: Options,
confirmBtn: ButtonOptions,
declineBtn: ButtonOptions,
text: string,
text: string
): ConfirmWindowOptions {
const isStr = typeof opts === "string"
return Object.assign({
text: isStr ? opts : text,
confirmBtn,
declineBtn,
}, isStr ? undefined : opts)
const isStr = typeof opts === "string";
return Object.assign(
{
text: isStr ? opts : text,
confirmBtn,
declineBtn
},
isStr ? undefined : opts
);
}
export const callConfirmDefault = (opts: Options) =>
confirmDefault(opts, acceptCall, declineCall, "You have an incoming call. Do you want to answer?")
export const controlConfirmDefault = (opts: Options) =>
confirmDefault(opts, remoteControl, cross, "Allow remote control?")
export const callConfirmDefault = (opts: Options) =>
confirmDefault(
opts,
TEXT_ANSWER_CALL,
TEXT_REJECT,
"You have an incoming call. Do you want to answer?"
);
export const controlConfirmDefault = (opts: Options) =>
confirmDefault(
opts,
TEXT_GRANT_REMORTE_ACCESS,
TEXT_REJECT,
"Allow remote control?"
);
function makeButton(options: ButtonOptions): HTMLButtonElement {
if (options instanceof HTMLButtonElement) {
return options
return options;
}
const btn = document.createElement('button')
const btn = document.createElement("button");
Object.assign(btn.style, {
background: "transparent",
padding: 0,
margin: 0,
border: 0,
padding: "10px 14px",
fontSize: "14px",
borderRadius: "3px",
border: "none",
cursor: "pointer",
borderRadius: "50%",
width: "22px",
height: "22px",
color: "white", // TODO: nice text button in case when only text is passed
})
display: "flex",
alignItems: "center",
textTransform: "uppercase",
marginRight: "10px"
});
if (typeof options === "string") {
btn.innerHTML = options
btn.innerHTML = options;
} else {
btn.innerHTML = options.innerHTML
Object.assign(btn.style, options.style)
btn.innerHTML = options.innerHTML;
Object.assign(btn.style, options.style);
}
return btn
return btn;
}
export default class ConfirmWindow {
private wrapper: HTMLDivElement;
constructor(options: ConfirmWindowOptions) {
const wrapper = document.createElement('div');
const popup = document.createElement('div');
const p = document.createElement('p');
const wrapper = document.createElement("div");
const popup = document.createElement("div");
const p = document.createElement("p");
p.innerText = options.text;
const buttons = document.createElement('div');
const buttons = document.createElement("div");
const confirmBtn = makeButton(options.confirmBtn);
const declineBtn = makeButton(options.declineBtn);
buttons.appendChild(confirmBtn);
@ -78,27 +97,45 @@ export default class ConfirmWindow {
popup.appendChild(p);
popup.appendChild(buttons);
Object.assign(confirmBtn.style, {
background: "rgba(0, 167, 47, 1)",
color: "white"
});
Object.assign(declineBtn.style, {
background: "#FFE9E9",
color: "#CC0000"
});
Object.assign(buttons.style, {
marginTop: "10px",
display: "flex",
alignItems: "center",
justifyContent: "space-evenly",
// justifyContent: "space-evenly",
backgroundColor: "white",
padding: "10px",
boxShadow: "0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1)",
borderRadius: "6px"
});
Object.assign(popup.style, {
position: "relative",
pointerEvents: "auto",
margin: "4em auto",
width: "90%",
maxWidth: "400px",
padding: "25px 30px",
background: "black",
opacity: ".75",
color: "white",
textAlign: "center",
borderRadius: ".25em .25em .4em .4em",
boxShadow: "0 0 20px rgb(0 0 0 / 20%)",
}, options.style);
Object.assign(
popup.style,
{
font: "14px 'Roboto', sans-serif",
position: "relative",
pointerEvents: "auto",
margin: "4em auto",
width: "90%",
maxWidth: "fit-content",
padding: "20px",
background: "#F3F3F3",
//opacity: ".75",
color: "black",
borderRadius: "3px",
boxShadow: "0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1)"
},
options.style
);
Object.assign(wrapper.style, {
position: "fixed",
@ -107,8 +144,8 @@ export default class ConfirmWindow {
height: "100%",
width: "100%",
pointerEvents: "none",
zIndex: 2147483647 - 1,
})
zIndex: 2147483647 - 1
});
wrapper.appendChild(popup);
this.wrapper = wrapper;
@ -116,18 +153,19 @@ export default class ConfirmWindow {
confirmBtn.onclick = () => {
this._remove();
this.resolve(true);
}
};
declineBtn.onclick = () => {
this._remove();
this.resolve(false);
}
};
}
private resolve: (result: boolean) => void = ()=>{};
private reject: ()=>void = ()=>{};
private resolve: (result: boolean) => void = () => {};
private reject: () => void = () => {};
mount(): Promise<boolean> {
document.body.appendChild(this.wrapper);
return new Promise((resolve, reject) => {
this.resolve = resolve;
this.reject = reject;
@ -135,7 +173,9 @@ export default class ConfirmWindow {
}
private _remove() {
if (!this.wrapper.parentElement) { return; }
if (!this.wrapper.parentElement) {
return;
}
document.body.removeChild(this.wrapper);
}
remove() {

View file

@ -45,7 +45,9 @@ export default class Mouse {
if (el instanceof HTMLElement) {
el.click()
el.focus()
return el
}
return null
}
private readonly pScrEl = document.scrollingElement || document.documentElement // Is it always correct

View file

@ -0,0 +1,88 @@
import Mouse from './Mouse.js';
import ConfirmWindow, { controlConfirmDefault } from './ConfirmWindow.js';
import type { Options as AssistOptions } from './Assist'
enum RCStatus {
  Disabled,
  Requesting,
  Enabled,
}

/**
 * Manages an agent's remote-control session over the user's page:
 * shows a confirmation window, mounts a virtual Mouse to replay the
 * agent's pointer/keyboard actions, and persists the controlling
 * agent's peer ID in sessionStorage so control survives in-tab
 * navigations (restored via reconnect()).
 */
export default class RemoteControl {
  // Virtual mouse replaying agent actions; null whenever control is off.
  private mouse: Mouse | null = null
  private status: RCStatus = RCStatus.Disabled
  // Peer ID of the agent currently holding (or requesting) control.
  private agentID: string | null = null
  // Pending confirmation window, if one is currently shown.
  private confirm: ConfirmWindow | null = null

  constructor(
    private options: AssistOptions,
    // Invoked with the agent ID when control is granted. (Name kept as-is
    // — "onGrand" — to avoid churn; it is private to this class.)
    private onGrand: (id?: string) => void,
    // Invoked with the agent ID when control is released.
    private onRelease: (id?: string) => void) {}

  /**
   * After a page navigation, re-grant control to the stored agent if it
   * is still among the connected peer IDs; otherwise drop the stale key.
   */
  reconnect(ids: string[]) {
    const storedID = sessionStorage.getItem(this.options.session_control_peer_key)
    if (storedID !== null && ids.includes(storedID)) {
      this.grantControl(storedID)
    } else {
      sessionStorage.removeItem(this.options.session_control_peer_key)
    }
  }

  requestControl = (id: string) => {
    // A request while some agent already holds (or requests) control acts
    // as a release for that agent only (releaseControl no-ops otherwise).
    if (this.agentID !== null) {
      this.releaseControl(id)
      return
    }
    this.agentID = id
    this.status = RCStatus.Requesting
    // Auto-dismiss if the user does not answer within 30 seconds.
    setTimeout(() => {
      if (this.status === RCStatus.Requesting) {
        this.releaseControl(id)
      }
    }, 30000)
    this.confirm = new ConfirmWindow(controlConfirmDefault(this.options.controlConfirm))
    this.confirm.mount().then(allowed => {
      if (allowed) {
        this.grantControl(id)
      } else {
        this.releaseControl(id)
      }
    }).catch(() => {
      // mount() may reject when the window is removed externally
      // (e.g. agent disconnect); treat that the same as a decline.
      // NOTE: the original called .catch() with no handler, which does
      // NOT swallow the rejection and left the Requesting state stuck.
      this.releaseControl(id)
    })
  }

  grantControl = (id: string) => {
    this.agentID = id
    this.status = RCStatus.Enabled
    this.mouse = new Mouse()
    this.mouse.mount()
    // Persist the controlling peer so reconnect() can restore control.
    sessionStorage.setItem(this.options.session_control_peer_key, id)
    this.onGrand(id)
  }

  releaseControl = (id: string) => {
    // Only the agent that holds/requests control may release it.
    if (this.agentID !== id) { return }
    this.confirm?.remove()
    this.confirm = null // drop the stale window reference
    this.mouse?.remove()
    this.mouse = null
    this.status = RCStatus.Disabled
    this.agentID = null
    sessionStorage.removeItem(this.options.session_control_peer_key)
    this.onRelease(id)
  }

  // Replay events only when they come from the controlling agent.
  scroll = (id, d) => { id === this.agentID && this.mouse?.scroll(d) }
  move = (id, xy) => { id === this.agentID && this.mouse?.move(xy) }

  // Element focused by the last replayed click; target for input events.
  private focused: HTMLElement | null = null
  click = (id, xy) => {
    if (id !== this.agentID || !this.mouse) { return }
    this.focused = this.mouse.click(xy)
  }
  input = (id, value) => {
    if (id !== this.agentID || !this.mouse || !this.focused) { return }
    if (this.focused instanceof HTMLTextAreaElement
      || this.focused instanceof HTMLInputElement) {
      this.focused.value = value
    } else if (this.focused.isContentEditable) {
      this.focused.innerText = value
    }
  }
}

View file

@ -0,0 +1,66 @@
/*
  This implements the case where both the dragArea and the dropArea
  are located inside the document of the iframe being dragged.
  Thus, all the events belong and relate to that inner document.
*/
/**
 * Attaches drag-and-drop behavior to `movingEl` (the call-window iframe).
 * `dragArea` is the handle the user grabs (inside the iframe's document);
 * `dropArea` is the surface that receives move/up events (the iframe's
 * documentElement). While dragging, the iframe is offset with a CSS
 * translate3d; on release the offset is baked into left/top.
 */
export default function attachDND(
  movingEl: HTMLIFrameElement,
  dragArea: Element,
  dropArea: Element,
) {
  dragArea.addEventListener('pointerdown', userPressed, { passive: true })
  // bbox: iframe rect captured at drag start (viewport coordinates);
  // startX/startY: pointer position at drag start, in the iframe's own frame;
  // raf: pending requestAnimationFrame handle (throttles style writes);
  // deltaX/deltaY: accumulated offset applied via transform.
  let bbox,
    startX, startY,
    raf,
    deltaX = 0, deltaY = 0
  function userPressed(event) {
    startX = event.clientX
    startY = event.clientY
    bbox = movingEl.getBoundingClientRect()
    // Move/up listeners are attached only for the duration of a drag.
    dropArea.addEventListener('pointermove', userMoved, { passive: true })
    dropArea.addEventListener('pointerup', userReleased, { passive: true })
    dropArea.addEventListener('pointercancel', userReleased, { passive: true })
  };
  /*
    Because dropArea lives inside the iframe, it moves together with the
    dragged element, so event coordinates are measured in a frame that
    itself shifts by delta each rAF tick. Hence we can only APPEND the
    per-frame offset (clientX - startX) rather than recompute an absolute
    position — startX/startY are deliberately never updated mid-drag.
  */
  function userMoved(event) {
    if (!raf) { // at most one accumulation + style write per animation frame
      deltaX += event.clientX - startX
      deltaY += event.clientY - startY
      // Clamp so the iframe (per its drag-start rect) stays in the viewport.
      deltaX = Math.min(
        Math.max(deltaX, -bbox.left),
        window.innerWidth - bbox.right,
      )
      deltaY = Math.min(
        Math.max(deltaY, -bbox.top),
        window.innerHeight - bbox.bottom,
      )
      raf = requestAnimationFrame(userMovedRaf)
    }
  }
  function userMovedRaf() {
    // Translate is cheap (compositor-only) compared to writing left/top.
    movingEl.style.transform = "translate3d("+deltaX+"px,"+deltaY+"px, 0px)";
    raf = null;
  }
  function userReleased() {
    dropArea.removeEventListener('pointermove', userMoved)
    dropArea.removeEventListener('pointerup', userReleased)
    dropArea.removeEventListener('pointercancel', userReleased)
    if (raf) {
      cancelAnimationFrame(raf)
      raf = null
    }
    // Bake the accumulated offset into left/top and reset the transform,
    // so the next drag starts from a fresh rect with zero deltas.
    movingEl.style.left = bbox.left + deltaX + "px"
    movingEl.style.top = bbox.top + deltaY + "px"
    movingEl.style.transform = "translate3d(0px,0px,0px)"
    deltaX = deltaY = 0
  }
}

View file

@ -2,7 +2,9 @@
// TODO: something with these big strings in bundle?
export const declineCall = `<svg xmlns="http://www.w3.org/2000/svg" height="22" width="22" viewBox="0 0 128 128" ><g id="Circle_Grid" data-name="Circle Grid"><circle cx="64" cy="64" fill="#ef5261" r="64"/></g><g id="icon"><path d="m57.831 70.1c8.79 8.79 17.405 12.356 20.508 9.253l4.261-4.26a7.516 7.516 0 0 1 10.629 0l9.566 9.566a7.516 7.516 0 0 1 0 10.629l-7.453 7.453c-7.042 7.042-27.87-2.358-47.832-22.319-9.976-9.981-16.519-19.382-20.748-28.222s-5.086-16.091-1.567-19.61l7.453-7.453a7.516 7.516 0 0 1 10.629 0l9.566 9.563a7.516 7.516 0 0 1 0 10.629l-4.264 4.271c-3.103 3.1.462 11.714 9.252 20.5z" fill="#eeefee"/></g></svg>`;
export const declineCall = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-telephone" viewBox="0 0 16 16">
<path d="M3.654 1.328a.678.678 0 0 0-1.015-.063L1.605 2.3c-.483.484-.661 1.169-.45 1.77a17.568 17.568 0 0 0 4.168 6.608 17.569 17.569 0 0 0 6.608 4.168c.601.211 1.286.033 1.77-.45l1.034-1.034a.678.678 0 0 0-.063-1.015l-2.307-1.794a.678.678 0 0 0-.58-.122l-2.19.547a1.745 1.745 0 0 1-1.657-.459L5.482 8.062a1.745 1.745 0 0 1-.46-1.657l.548-2.19a.678.678 0 0 0-.122-.58L3.654 1.328zM1.884.511a1.745 1.745 0 0 1 2.612.163L6.29 2.98c.329.423.445.974.315 1.494l-.547 2.19a.678.678 0 0 0 .178.643l2.457 2.457a.678.678 0 0 0 .644.178l2.189-.547a1.745 1.745 0 0 1 1.494.315l2.306 1.794c.829.645.905 1.87.163 2.611l-1.034 1.034c-.74.74-1.846 1.065-2.877.702a18.634 18.634 0 0 1-7.01-4.42 18.634 18.634 0 0 1-4.42-7.009c-.362-1.03-.037-2.137.703-2.877L1.885.511z"/>
</svg>`;
export const acceptCall = declineCall.replace('fill="#ef5261"', 'fill="green"')

View file

@ -1,19 +1,51 @@
Copyright (c) 2021 OpenReplay.com <support@openreplay.com>
Copyright (c) 2022 Asayer, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Reach out (license@openreplay.com) if you have any questions regarding the license.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
------------------------------------------------------------------------------------
Elastic License 2.0 (ELv2)
**Acceptance**
By using the software, you agree to all of the terms and conditions below.
**Copyright License**
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject to the limitations and conditions below
**Limitations**
You may not provide the software to third parties as a hosted or managed service, where the service provides users with access to any substantial set of the features or functionality of the software.
You may not move, change, disable, or circumvent the license key functionality in the software, and you may not remove or obscure any functionality in the software that is protected by the license key.
You may not alter, remove, or obscure any licensing, copyright, or other notices of the licensor in the software. Any use of the licensor's trademarks is subject to applicable law.
**Patents**
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case subject to the limitations and conditions in this license. This license does not cover any patent claims that you cause to be infringed by modifications or additions to the software. If you or your company make any written claim that the software infringes or contributes to infringement of any patent, your patent license for the software granted under these terms ends immediately. If your company makes such a claim, your patent license ends immediately for work on behalf of your company.
**Notices**
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these terms.
If you modify the software, you must include in any modified copies of the software prominent notices stating that you have modified the software.
**No Other Rights**
These terms do not imply any licenses other than those expressly granted in these terms.
**Termination**
If you use the software in violation of these terms, such use is not licensed, and your licenses will automatically terminate. If the licensor provides you with a notice of your violation, and you cease all violation of this license no later than 30 days after you receive that notice, your licenses will be reinstated retroactively. However, if you violate these terms after such reinstatement, any additional violation of these terms will cause your licenses to terminate automatically and permanently.
**No Liability**
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will not be liable to you for any damages arising out of these terms or the use or nature of the software, under any kind of legal claim.
**Definitions**
The *licensor* is the entity offering these terms, and the *software* is the software the licensor makes available under these terms, including any portion of it.
*you* refers to the individual or entity agreeing to these terms.
*your company* is any legal entity, sole proprietorship, or other kind of organization that you work for, plus all organizations that have control over, are under the control of, or are under common control with that organization. *control* means ownership of substantially all the assets of an entity, or the power to direct its management and policies by vote, contract, or otherwise. Control can be direct or indirect.
*your licenses* are all the licenses granted to you for the software under these terms.
*use* means anything you do with the software requiring one of your licenses.
*trademark* means trademarks, service marks, and similar rights.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-fetch",
"description": "Tracker plugin for fetch requests recording ",
"version": "3.5.1",
"version": "3.5.2",
"keywords": [
"fetch",
"logging",

View file

@ -33,9 +33,10 @@ export default function(opts: Partial<Options> = {}) {
},
opts,
);
const origFetch = window.fetch
return (app: App | null) => {
if (app === null) {
return window.fetch;
return origFetch
}
const ihOpt = options.ignoreHeaders
@ -45,7 +46,7 @@ export default function(opts: Partial<Options> = {}) {
const fetch = async (input: RequestInfo, init: RequestInit = {}) => {
if (typeof input !== 'string') {
return window.fetch(input, init);
return origFetch(input, init);
}
if (options.sessionTokenHeader) {
const sessionToken = app.getSessionToken();
@ -63,7 +64,7 @@ export default function(opts: Partial<Options> = {}) {
}
}
const startTime = performance.now();
const response = await window.fetch(input, init);
const response = await origFetch(input, init);
const duration = performance.now() - startTime;
if (options.failuresOnly && response.status < 400) {
return response

View file

@ -1,4 +1,5 @@
node_modules
npm-debug.log
lib
cjs
.cache

View file

@ -1,18 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tracker_1 = require("@openreplay/tracker/cjs");
function default_1() {
return (app) => {
if (app === null) {
return (name) => (fn, thisArg) => thisArg === undefined ? fn : fn.bind(thisArg);
}
return (name) => (fn, thisArg) => (...args) => {
const startTime = performance.now();
const result = thisArg === undefined ? fn.apply(this, args) : fn.apply(thisArg, args);
const duration = performance.now() - startTime;
app.send(tracker_1.Messages.Profiler(name, duration, args.map(String).join(', '), String(result)));
return result;
};
};
}
exports.default = default_1;

View file

@ -1 +0,0 @@
{ "type": "commonjs" }

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-redux",
"description": "Tracker plugin for Redux state recording",
"version": "3.4.8",
"version": "3.5.0",
"keywords": [
"redux",
"logging",
@ -23,11 +23,11 @@
},
"dependencies": {},
"peerDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"redux": "^4.0.0"
},
"devDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"prettier": "^1.18.2",
"replace-in-files-cli": "^1.0.0",
"typescript": "^4.6.0-dev.20211126"

Some files were not shown because too many files have changed in this diff Show more