merged main repo

This commit is contained in:
Shekar Siri 2022-04-12 13:05:01 +02:00
commit bf7f796b22
89 changed files with 3636 additions and 1146 deletions

View file

@ -1,8 +1,9 @@
# This action will push the chalice changes to aws
on:
workflow_dispatch:
push:
branches:
- dev
- api-v1.5.5
paths:
- api/**

View file

@ -3,7 +3,7 @@ on:
workflow_dispatch:
push:
branches:
- dev
- api-v1.5.5
paths:
- frontend/**

View file

@ -102,7 +102,7 @@ def Build(a):
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
full_args, query_part= sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value

View file

@ -64,14 +64,17 @@ def get_live_sessions(project_id, filters=None):
return helper.list_to_camel_case(results)
def get_live_sessions_ws(project_id):
def get_live_sessions_ws(project_id, user_id=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
try:
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
live_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Live-Assist response")
@ -101,12 +104,12 @@ def get_live_session_by_id(project_id, session_id):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
try:
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Assist response")

View file

@ -2,7 +2,7 @@ import json
import schemas
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
@ -399,7 +399,10 @@ def get_details_chart(project_id, error_id, user_id, **data):
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
ch_sub_query = [f"{project_key} =%(project_id)s"]
if project_key is None:
ch_sub_query = []
else:
ch_sub_query = [f"{project_key} =%(project_id)s"]
if time_constraint:
ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
f"timestamp < %({endTime_arg_name})s"]
@ -415,21 +418,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
@dev.timed
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
@ -437,8 +437,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
"pe.project_id=%(project_id)s"]
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True)
pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
# pg_sub_query_chart.append("source ='js_exception'")
pg_sub_query_chart.append("errors.error_id =details.error_id")
statuses = []
error_ids = None
@ -446,13 +446,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status)
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["error_id"] for e in statuses]
error_ids = [e["errorId"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
@ -473,6 +474,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
@ -483,11 +487,15 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
if favorite_only:
if data.bookmarked:
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
main_pg_query = f"""\
SELECT full_count,
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
main_pg_query = f"""SELECT full_count,
error_id,
name,
message,
@ -522,7 +530,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
COUNT(session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (SELECT DISTINCT session_id
FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
FROM events.errors
WHERE {" AND ".join(pg_sub_query_chart)}
) AS sessions ON (TRUE)
GROUP BY timestamp
@ -557,16 +565,16 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"user_id": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]

View file

@ -97,7 +97,55 @@ def __get_data_for_extend(data):
return data["data"]
def __pg_errors_query(source=None):
def __pg_errors_query(source=None, value_length=None):
if value_length is None or value_length > 2:
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
@ -120,30 +168,6 @@ def __pg_errors_query(source=None):
AND lg.name ILIKE %(svalue)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{event_type.ERROR.ui_type}' AS type
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
AND lg.project_id = %(project_id)s
{"AND source = %(source)s" if source is not None else ""}
LIMIT 5));"""
@ -152,9 +176,12 @@ def __search_pg_errors(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
cur.mogrify(__pg_errors_query(source,
value_length=len(value) \
if SUPPORTED_TYPES[event_type.ERROR.ui_type].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value),
"source": source}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@ -162,26 +189,69 @@ def __search_pg_errors(project_id, value, key=None, source=None):
def __search_pg_errors_ios(project_id, value, key=None, source=None):
now = TimeUTC.now()
if SUPPORTED_TYPES[event_type.ERROR_IOS.ui_type].change_by_length is False or len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);"""
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.reason ILIKE %(svalue)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
LIMIT 5);"""
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.reason ILIKE %(value)s
LIMIT 5)
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{event_type.ERROR_IOS.ui_type}' AS type
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
LIMIT 5);""",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : errors")
return results
@ -198,42 +268,69 @@ def __search_pg_metadata(project_id, value, key=None, source=None):
for k in meta_keys.keys():
colname = metadata.index_to_colname(meta_keys[k])
sub_from.append(
f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)")
if SUPPORTED_TYPES[event_type.METADATA.ui_type].change_by_length is False or len(value) > 2:
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)
UNION
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(value)s LIMIT 5))
""")
else:
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
FROM public.sessions
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
results = helper.list_to_camel_case(cur.fetchall())
return results
def __generic_query(typename):
return f"""\
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5)"""
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f"""(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 5)
UNION
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(value)s
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type='{typename}'
AND value ILIKE %(svalue)s
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(__generic_query(event.ui_type),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
cur.execute(
cur.mogrify(
__generic_query(event.ui_type,
value_length=len(value) \
if SUPPORTED_TYPES[event.ui_type].change_by_length \
else None),
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
return helper.list_to_camel_case(cur.fetchall())
return f
@ -263,142 +360,96 @@ class event_type:
SUPPORTED_TYPES = {
event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
query=__generic_query(typename=event_type.CLICK.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
query=__generic_query(typename=event_type.INPUT.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
query=__generic_query(typename=event_type.LOCATION.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
query=__generic_query(typename=event_type.CUSTOM.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
query=__generic_query(typename=event_type.REQUEST.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
query=__generic_query(typename=event_type.GRAPHQL.ui_type),
value_limit=3,
starts_with="/",
starts_limit=4,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
query=__generic_query(typename=event_type.STATEACTION.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
query=None,
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
query=None, change_by_length=True),
# IOS
event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[]),
change_by_length=True),
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=[""]),
change_by_length=True),
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
value_limit=3,
starts_with="/",
starts_limit=3,
ignore_if_starts_with=[""]),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors,
query=None,
value_limit=4,
starts_with="",
starts_limit=4,
ignore_if_starts_with=["/"]),
change_by_length=True),
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
query=None, change_by_length=True),
}
def __get_merged_queries(queries, value, project_id):
if len(queries) == 0:
return []
now = TimeUTC.now()
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")",
{"project_id": project_id, "value": helper.string_to_sql_like(value)}))
results = helper.list_to_camel_case(cur.fetchall())
print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}")
return results
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events.sort()
sub_queries = []
for e in autocomplete_events:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(svalue)s
LIMIT 5)""")
if len(value) > 2:
sub_queries.append(f"""(SELECT type, value
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND type= '{e}'
AND value ILIKE %(value)s
LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
FROM (SELECT project_id, type, value
FROM (SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(svalue)s
UNION
SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
WHERE project_id = %(project_id)s
AND value ILIKE %(value)s) AS u
WHERE Row_ID <= 5) AS sfa
ORDER BY sfa.type;""",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))
query = cur.mogrify(" UNION ".join(sub_queries) + ";",
{"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
def search_pg2(text, event_type, project_id, source, key):
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
if event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
# for IOS events autocomplete
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
# source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)

View file

@ -201,7 +201,7 @@ def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.Funnel
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
@ -217,16 +217,21 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
data.events = filter_stages(__parse_events(data.events))
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
@ -235,9 +240,14 @@ def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.Fu
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
data.events = __fix_stages(data.events)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
# fix: this fix for huge drop count
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
total_drop_due_to_issues = insights[0]["sessionsCount"]
# end fix
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
return {"data": {"stages": insights,
"totalDropDueToIssues": total_drop_due_to_issues}}
@ -256,7 +266,7 @@ def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSe
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
@ -307,7 +317,7 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
if f is None:
return {"errors": ["funnel not found"]}
data.startDate = data.startDate if data.startDate is not None else start_date

View file

@ -13,7 +13,8 @@ def get_by_session_id(session_id):
header_size,
encoded_body_size,
decoded_body_size,
success
success,
COALESCE(status, CASE WHEN success THEN 200 END) AS status
FROM events.resources
WHERE session_id = %(session_id)s;"""
params = {"session_id": session_id}

View file

@ -169,10 +169,10 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status="ALL", count_only=False, issue=None):
full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
@ -199,6 +199,17 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = "DESC"
else:
data.order = data.order.upper()
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == 'DESC' else 'MAX'}({sort})"
else:
sort = 'start_ts'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
COALESCE(JSONB_AGG(users_sessions)
@ -207,52 +218,58 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
count(full_sessions) AS user_sessions_count,
jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
MIN(full_sessions.start_ts) AS first_session_ts,
ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
GROUP BY user_id
ORDER BY user_sessions_count DESC) AS users_sessions;""",
ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
) AS filtred_sessions
) AS full_sessions
GROUP BY user_id
) AS users_sessions;""",
full_args)
else:
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY issue_score DESC, {sort} {data.order}, session_id desc) AS rn
FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
ORDER BY issue_score DESC, {sort} {data.order}) AS full_sessions;""",
full_args)
# print("--------------------")
# print(main_query)
# print("--------------------")
try:
cur.execute(main_query)
except Exception as err:
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
print(main_query)
print("--------- PAYLOAD -----------")
print(data.dict())
print("--------------------")
raise err
if errors_only:
return helper.list_to_camel_case(cur.fetchall())
cur.execute(main_query)
sessions = cur.fetchone()
if count_only:
return helper.dict_to_camel_case(sessions)
total = sessions["count"]
sessions = sessions["sessions"]
# sessions = []
# total = cur.rowcount
# row = cur.fetchone()
# limit = 200
# while row is not None and len(sessions) < limit:
# if row.get("favorite"):
# limit += 1
# sessions.append(row)
# row = cur.fetchone()
if errors_only:
return sessions
if data.group_by_user:
for i, s in enumerate(sessions):
sessions[i] = {**s.pop("last_session")[0], **s}
@ -283,9 +300,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
@ -368,6 +385,19 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
return sessions
def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
    """Return True when a search event carries enough data to be usable in a query.

    An event is rejected when it lacks the payload its type requires:
    values, a performance source threshold, or detail sub-filters.
    """
    detail_types = (schemas.EventType.request_details, schemas.EventType.graphql_details)
    performance_types = (schemas.PerformanceEventType.location_dom_complete,
                         schemas.PerformanceEventType.location_largest_contentful_paint_time,
                         schemas.PerformanceEventType.location_ttfb,
                         schemas.PerformanceEventType.location_avg_cpu_load,
                         schemas.PerformanceEventType.location_avg_memory_usage)
    # Non-"any" events outside the detail types must provide at least one value.
    if not is_any and len(event.value) == 0 and event.type not in detail_types:
        return False
    # Performance events need a source (threshold) to compare against.
    if event.type in performance_types and (event.source is None or len(event.source) == 0):
        return False
    # Request/GraphQL detail events must carry sub-filters.
    if event.type in detail_types and (event.filters is None or len(event.filters) == 0):
        return False
    return True
def search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
@ -377,10 +407,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
"s.duration IS NOT NULL"
]
extra_from = ""
fav_only_join = ""
if favorite_only and not errors_only:
fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
# extra_constraints.append("fs.user_id = %(userId)s")
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@ -587,6 +613,13 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
value_key=f_k))
# ---------------------------------------------------------------------------
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
valid_events_count += 1
events_query_from = []
event_index = 0
or_events = data.events_order == schemas.SearchEventOrder._or
@ -597,16 +630,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not is_any and len(event.value) == 0 and event_type not in [schemas.EventType.request_details,
schemas.EventType.graphql_details] \
or event_type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
] and (event.source is None or len(event.source) == 0) \
or event_type in [schemas.EventType.request_details, schemas.EventType.graphql_details] and (
event.filters is None or len(event.filters) == 0):
if not __is_valid_event(is_any=is_any, event=event):
continue
op = __get_sql_operator(event.operator)
is_not = False
@ -618,6 +642,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
"main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s",
"ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"]
if favorite_only and not errors_only:
event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)"
event_where.append("fs.user_id = %(userId)s")
else:
event_from = "%s"
event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s",
@ -922,7 +949,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
""")
else:
events_query_from.append(f"""\
(SELECT main.session_id, MIN(main.timestamp) AS timestamp
(SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp
FROM {event_from}
WHERE {" AND ".join(event_where)}
GROUP BY 1
@ -936,16 +963,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
MIN(timestamp) AS first_event_ts,
MAX(timestamp) AS last_event_ts
FROM ({events_joiner.join(events_query_from)}) AS u
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
events_query_part = f"""SELECT
event_0.session_id,
MIN(event_0.timestamp) AS first_event_ts,
MAX(event_{event_index - 1}.timestamp) AS last_event_ts
FROM {events_joiner.join(events_query_from)}
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
data.events = []
# ---------------------------------------------------------------------------
@ -959,19 +984,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# elif data.platform == schemas.PlatformType.desktop:
# extra_constraints.append(
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if data.order is None:
data.order = "DESC"
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
else:
sort = 'session_id'
if errors_only:
extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
if error_status != "ALL":
extra_constraints.append("ser.project_id = %(project_id)s")
if error_status != schemas.ErrorStatus.all:
extra_constraints.append("ser.status = %(error_status)s")
full_args["status"] = error_status.lower()
full_args["error_status"] = error_status
if favorite_only:
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(userId)s")
@ -1009,7 +1029,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
{extra_from}
WHERE
{" AND ".join(extra_constraints)}"""
return full_args, query_part, sort
return full_args, query_part
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
@ -1108,48 +1128,6 @@ def search_by_issue(user_id, issue, project_id, start_date, end_date):
return helper.list_to_camel_case(rows)
def get_favorite_sessions(project_id, user_id, include_viewed=False):
    """Return up to 50 sessions the user has marked as favorite.

    :param project_id: project whose sessions are listed (bound as %(projectId)s).
    :param user_id: owner of the favorites (bound as %(userId)s).
    :param include_viewed: when True, add a boolean ``viewed`` column telling
        whether the user already viewed each session.
    :return: list of session dicts with camelCase keys; every row has favorite=True.
    """
    with pg_client.PostgresClient() as cur:
        # Shared FROM/WHERE part: restrict to this user's favorited sessions.
        # NOTE(review): LEFT JOIN + WHERE fs.user_id effectively behaves as an
        # INNER JOIN here — presumably intentional; confirm.
        query_part = cur.mogrify(f"""\
FROM public.sessions AS s
LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
WHERE fs.user_id = %(userId)s""",
                                 {"projectId": project_id, "userId": user_id}
                                 )
        extra_query = b""
        if include_viewed:
            # Optional correlated subquery: TRUE if a user_viewed_sessions row
            # exists for this session/user, FALSE otherwise.
            extra_query = cur.mogrify(""",\
COALESCE((SELECT TRUE
 FROM public.user_viewed_sessions AS fs
 WHERE s.session_id = fs.session_id
   AND fs.user_id = %(userId)s), FALSE) AS viewed""",
                                      {"projectId": project_id, "userId": user_id})
        # mogrify returns bytes; decode the pre-bound fragments into the final SQL.
        cur.execute(f"""\
SELECT s.project_id,
       s.session_id::text AS session_id,
       s.user_uuid,
       s.user_id,
       s.user_os,
       s.user_browser,
       s.user_device,
       s.user_country,
       s.start_ts,
       s.duration,
       s.events_count,
       s.pages_count,
       s.errors_count,
       TRUE AS favorite
       {extra_query.decode('UTF-8')}
{query_part.decode('UTF-8')}
ORDER BY s.session_id
LIMIT 50;""")
        sessions = cur.fetchall()
    return helper.list_to_camel_case(sessions)
def get_user_sessions(project_id, user_id, start_date, end_date):
with pg_client.PostgresClient() as cur:
constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]

View file

@ -80,32 +80,41 @@ def get_top_key_values(project_id):
return helper.dict_to_CAPITAL_keys(row)
def __generic_query(typename):
return f"""\
SELECT value, type
FROM ((SELECT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
def __generic_query(typename, value_length=None):
if value_length is None or value_length > 2:
return f""" (SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 5)
UNION
(SELECT value, type
(SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)) AS met"""
LIMIT 5);"""
return f""" SELECT DISTINCT value, type
FROM public.autocomplete
WHERE
project_id = %(project_id)s
AND type ='{typename}'
AND value ILIKE %(svalue)s
ORDER BY value
LIMIT 10;"""
def __generic_autocomplete(typename):
def f(project_id, text):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(__generic_query(typename),
query = cur.mogrify(__generic_query(typename,
value_length=len(text) \
if SUPPORTED_TYPES[typename].change_by_length else None),
{"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)})
@ -120,124 +129,73 @@ SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
value_limit=5,
starts_with="/",
starts_limit=5,
ignore_if_starts_with=[]),
change_by_length=True),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
# IOS
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
change_by_length=True),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
change_by_length=True),
}
@ -247,6 +205,7 @@ def search(text, meta_type, project_id):
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
# for IOS events autocomplete
# if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
# rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}

View file

@ -528,7 +528,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
split = issue.split('__^__')
issues_dict['significant' if is_sign else 'insignificant'].append({
"type": split[0],
"title": get_issue_title(split[0]),
"title": helper.get_issue_title(split[0]),
"affected_sessions": affected_sessions[issue],
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
"lost_conversions": lost_conversions,
@ -641,27 +641,3 @@ def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
output['stages'] = stages_list
output['criticalIssuesCount'] = n_critical_issues
return output
def get_issue_title(issue_type):
    """Map an internal issue-type identifier to a human-readable title.

    Unknown identifiers are returned unchanged so new issue types still
    display something meaningful.
    """
    titles = {
        'click_rage': "Click Rage",
        'dead_click': "Dead Click",
        'excessive_scrolling': "Excessive Scrolling",
        'bad_request': "Bad Request",
        'missing_resource': "Missing Image",
        'memory': "High Memory Usage",
        'cpu': "High CPU",
        'slow_resource': "Slow Resource",
        'slow_page_load': "Slow Page Performance",
        'crash': "Crash",
        'ml_cpu': "High CPU",
        'ml_memory': "High Memory Usage",
        'ml_dead_click': "Dead Click",
        'ml_click_rage': "Click Rage",
        'ml_mouse_thrashing': "Mouse Thrashing",
        'ml_excessive_scrolling': "Excessive Scrolling",
        'ml_slow_resources': "Slow Resource",
        'custom': "Custom Event",
        'js_exception': "Error",
        'custom_event_error': "Custom Error",
        'js_error': "Error",
    }
    return titles.get(issue_type, issue_type)

View file

@ -6,10 +6,7 @@ class Event:
class SupportedFilter:
def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with):
def __init__(self, get, query, change_by_length):
self.get = get
self.query = query
self.valueLimit = value_limit
self.startsWith = starts_with
self.startsLimit = starts_limit
self.ignoreIfStartsWith = ignore_if_starts_with
self.change_by_length = change_by_length

View file

@ -213,11 +213,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
if value is None:
return value
if op == schemas.SearchEventOperator._starts_with:
return value + '%'
return f"{value}%"
elif op == schemas.SearchEventOperator._ends_with:
return '%' + value
return f"%{value}"
elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains:
return '%' + value + '%'
return f"%{value}%"
return value

View file

@ -5,11 +5,12 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool
PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
_PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
@ -63,7 +64,7 @@ class PostgresClient:
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
self.connection = psycopg2.connect(**PG_CONFIG)
self.connection = psycopg2.connect(**_PG_CONFIG)
else:
self.connection = postgreSQL_pool.getconn()

View file

@ -21,13 +21,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/sessions2/favorite', tags=["sessions"])
def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True)
}
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
@ -126,7 +119,7 @@ def events_search(projectId: int, q: str,
else:
return {"data": []}
result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key)
result = events.search(text=q, event_type=type, project_id=projectId, source=source, key=key)
return result
@ -147,17 +140,6 @@ def session_top_filter_values(projectId: int, context: schemas.CurrentContext =
return {'data': sessions_metas.get_top_key_values(projectId)}
@app.get('/{projectId}/sessions/filters/search', tags=["sessions"])
def get_session_filters_meta(projectId: int, q: str, type: str,
context: schemas.CurrentContext = Depends(OR_context)):
meta_type = type
if len(meta_type) == 0:
return {"data": []}
if len(q) == 0:
return {"data": []}
return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
@ -838,15 +820,8 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId)
def sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, user_id=userId)
return {'data': data}
@ -902,12 +877,9 @@ def edit_client(data: schemas.UpdateTenantSchema = Body(...),
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False,
data: schemas.SearchErrorsSchema = Body(...),
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(favorite, str):
favorite = True if len(favorite) == 0 else False
return errors.search(data, projectId, user_id=context.user_id, status=status, favorite_only=favorite)
return errors.search(data, projectId, user_id=context.user_id)
@app.get('/{projectId}/errors/stats', tags=['errors'])

View file

@ -11,6 +11,10 @@ def attribute_to_camel_case(snake_str):
return components[0] + ''.join(x.title() for x in components[1:])
def transform_email(email: str) -> str:
    """Normalize an email address to lowercase; non-string values pass through unchanged."""
    if isinstance(email, str):
        return email.lower()
    return email
class _Grecaptcha(BaseModel):
    """Mixin carrying the optional Google reCAPTCHA response token.

    The alias matches the field name sent by Google's client-side widget;
    it is Optional, presumably because captcha can be disabled — confirm.
    """
    g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@ -18,6 +22,7 @@ class _Grecaptcha(BaseModel):
class UserLoginSchema(_Grecaptcha):
    """Login payload: credentials plus the inherited reCAPTCHA token."""
    email: EmailStr = Field(...)
    password: str = Field(...)

    # Lowercase the email before validation so logins are case-insensitive.
    _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class UserSignupSchema(UserLoginSchema):
@ -31,17 +36,21 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
email: Optional[str] = Field(None)
email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserPasswordSchema(BaseModel):
@ -70,7 +79,9 @@ class CurrentAPIContext(BaseModel):
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
@ -115,15 +126,19 @@ class CreateEditWebhookSchema(BaseModel):
class CreateMemberSchema(BaseModel):
userId: Optional[int] = Field(None)
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditMemberSchema(BaseModel):
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
@ -244,6 +259,8 @@ class EmailPayloadSchema(BaseModel):
link: str = Field(...)
message: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
@ -252,6 +269,8 @@ class MemberInvitationPayloadSchema(BaseModel):
client_id: str = Field(...)
sender_name: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class Config:
alias_generator = attribute_to_camel_case
@ -600,7 +619,7 @@ class SessionsSearchPayloadSchema(BaseModel):
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
order: str = Field(default="DESC")
order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
@ -690,8 +709,24 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
rangeValue: Optional[str] = Field(None)
class ErrorStatus(str, Enum):
    """Lifecycle status used to filter errors; 'all' disables the status filter."""
    all = 'all'
    unresolved = 'unresolved'
    resolved = 'resolved'
    ignored = 'ignored'
class ErrorSort(str, Enum):
    """Sort keys accepted by the errors-search endpoint.

    Member names describe the metric; the values are the identifiers
    clients send on the wire (note users_count -> 'users').
    """
    occurrence = 'occurrence'
    users_count = 'users'
    sessions_count = 'sessions'
class SearchErrorsSchema(SessionsSearchPayloadSchema):
    """Payload for searching errors; extends the sessions-search payload."""
    # Default ordering: by occurrence (most recent first per the sort-key mapping).
    sort: ErrorSort = Field(default=ErrorSort.occurrence)
    # Number of buckets for the chart time series — TODO confirm against caller.
    density: Optional[int] = Field(7)
    # 'all' means no status filtering.
    status: Optional[ErrorStatus] = Field(default=ErrorStatus.all)
    # Free-text query; None disables text filtering.
    query: Optional[str] = Field(default=None)
class MetricPayloadSchema(BaseModel):

View file

@ -9,16 +9,17 @@ import (
func getSessionKey(sessionID uint64) string {
// Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID
return strconv.FormatUint(sessionID>>50, 10)
return strconv.FormatUint(sessionID>>50, 10)
}
func ResolveURL(baseurl string, rawurl string) string {
rawurl = strings.Trim(rawurl, " ")
if !isRelativeCachable(rawurl) {
return rawurl
}
base, _ := url.ParseRequestURI(baseurl) // fn Only for base urls
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
u, _ := url.Parse(rawurl) // TODO: handle errors ?
if base == nil || u == nil {
return rawurl
}
return base.ResolveReference(u).String() // ResolveReference same as base.Parse(rawurl)
@ -71,22 +72,20 @@ func GetCachePathForJS(rawurl string) string {
}
func GetCachePathForAssets(sessionID uint64, rawurl string) string {
return getCachePathWithKey(sessionID, rawurl)
return getCachePathWithKey(sessionID, rawurl)
}
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string {
fullURL, cachable := GetFullCachableURL(baseURL, relativeURL)
if !cachable {
return fullURL
}
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
u := url.URL{
Path: r.assetsURL.Path + getCachePathWithKey(sessionID, fullURL),
Host: r.assetsURL.Host,
Scheme: r.assetsURL.Scheme,
}
return u.String()
}

View file

@ -3,7 +3,7 @@ import json
import schemas
from chalicelib.core import dashboard
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import ch_client
from chalicelib.utils import ch_client, metrics_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
@ -424,9 +424,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if time_constraint:
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
if platform == 'mobile':
if platform == schemas.PlatformType.mobile:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == 'desktop':
elif platform == schemas.PlatformType.desktop:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@ -438,20 +438,213 @@ def __get_step_size(startTimestamp, endTimestamp, density):
def __get_sort_key(key):
return {
"datetime": "max_datetime",
"lastOccurrence": "max_datetime",
"firstOccurrence": "min_datetime"
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
}.get(key, 'max_datetime')
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate",
                               endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                               project_key="project_id"):
    """Build the shared list of SQL WHERE-clause fragments for PG queries.

    :param platform: optional platform filter (mobile/desktop).
    :param time_constraint: constrain ``timestamp`` to the start/end parameters.
    :param startTime_arg_name: bind-parameter name for the range start.
    :param endTime_arg_name: bind-parameter name for the range end.
    :param chart: add per-bucket constraints against ``generated_timestamp``.
    :param step_size_name: bind-parameter name for the chart bucket width.
    :param project_key: column for the project filter; None skips it entirely.
    :return: list of SQL condition strings to be AND-joined by the caller.
    """
    constraints = [] if project_key is None else [f"{project_key} =%(project_id)s"]
    if time_constraint:
        constraints.append(f"timestamp >= %({startTime_arg_name})s")
        constraints.append(f"timestamp < %({endTime_arg_name})s")
    if chart:
        # Bucket boundaries come from a generate_series() alias in the caller's query.
        constraints.append("timestamp >= generated_timestamp")
        constraints.append(f"timestamp < generated_timestamp + %({step_size_name})s")
    if platform == schemas.PlatformType.mobile:
        constraints.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.desktop:
        constraints.append("user_device_type = 'desktop'")
    return constraints
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
    """Search aggregated JS errors for a project over PostgreSQL.

    When the payload carries events/filters, first narrows to errors raised by
    matching sessions (via ``sessions.search2_pg``), then aggregates per-error
    user/session counts, first/last occurrence and a density-based chart.

    :param data: search payload (period, filters, sort, pagination, bookmarks...).
    :param project_id: project to search in.
    :param user_id: current user, used for favorite/viewed flags.
    :param flows: when True, return only the total count of matching errors.
    :return: ``{"data": {"total": int, "errors": [...]}}`` with camelCase rows,
             or ``{"data": {"count": int}}`` when ``flows`` is True.
    """
    empty_response = {"data": {
        'total': 0,
        'errors': []
    }}
    # Platform (mobile/desktop) is carried as a regular filter in the payload.
    platform = None
    for f in data.filters:
        if f.type == schemas.FilterType.platform and len(f.value) > 0:
            platform = f.value[0]
    pg_sub_query = __get_basic_constraints_pg(platform, project_key="sessions.project_id")
    pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                     "pe.project_id=%(project_id)s"]
    # Chart constraints are bound per generate_series bucket, not to the period.
    pg_sub_query_chart = __get_basic_constraints_pg(platform, time_constraint=False, chart=True, project_key=None)
    # pg_sub_query_chart.append("source ='js_exception'")
    pg_sub_query_chart.append("errors.error_id =details.error_id")
    statuses = []
    error_ids = None
    if data.startDate is None:
        data.startDate = TimeUTC.now(-30)
    if data.endDate is None:
        data.endDate = TimeUTC.now(1)
    if len(data.events) > 0 or len(data.filters) > 0:
        print("-- searching for sessions before errors")
        # Pre-search: restrict to errors raised by sessions matching the payload
        # (also applies the requested error status).
        statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                       error_status=data.status)
        if len(statuses) == 0:
            return empty_response
        error_ids = [e["errorId"] for e in statuses]
    with pg_client.PostgresClient() as cur:
        if data.startDate is None:
            data.startDate = TimeUTC.now(-7)
        if data.endDate is None:
            data.endDate = TimeUTC.now()
        # Chart bucket width derived from the period and requested density.
        step_size = metrics_helper.__get_step_size(data.startDate, data.endDate, data.density, factor=1)
        sort = __get_sort_key('datetime')
        if data.sort is not None:
            sort = __get_sort_key(data.sort)
        order = "DESC"
        if data.order is not None:
            order = data.order
        extra_join = ""
        params = {
            "startDate": data.startDate,
            "endDate": data.endDate,
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size}
        if data.status != schemas.ErrorStatus.all:
            pg_sub_query.append("status = %(error_status)s")
            params["error_status"] = data.status
        if data.limit is not None and data.page is not None:
            params["errors_offset"] = (data.page - 1) * data.limit
            params["errors_limit"] = data.limit
        else:
            # Default page when pagination is not requested.
            params["errors_offset"] = 0
            params["errors_limit"] = 200
        if error_ids is not None:
            params["error_ids"] = tuple(error_ids)
            pg_sub_query.append("error_id IN %(error_ids)s")
        if data.bookmarked:
            # Restrict to the current user's favorite errors.
            pg_sub_query.append("ufe.user_id = %(userId)s")
            extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
        if data.query is not None and len(data.query) > 0:
            # Free-text search over error name and message.
            pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
            params["error_query"] = helper.values_for_operator(value=data.query,
                                                               op=schemas.SearchEventOperator._contains)
        # Inner query: aggregate + sort + paginate; the two LATERAL joins then add
        # occurrence bounds and the per-bucket session-count chart for each page row.
        main_pg_query = f"""SELECT full_count,
                                   error_id,
                                   name,
                                   message,
                                   users,
                                   sessions,
                                   last_occurrence,
                                   first_occurrence,
                                   chart
                            FROM (SELECT COUNT(details) OVER () AS full_count, details.*
                                  FROM (SELECT error_id,
                                               name,
                                               message,
                                               COUNT(DISTINCT user_uuid) AS users,
                                               COUNT(DISTINCT session_id) AS sessions,
                                               MAX(timestamp) AS max_datetime,
                                               MIN(timestamp) AS min_datetime
                                        FROM events.errors
                                                 INNER JOIN public.errors AS pe USING (error_id)
                                                 INNER JOIN public.sessions USING (session_id)
                                                 {extra_join}
                                        WHERE {" AND ".join(pg_sub_query)}
                                        GROUP BY error_id, name, message
                                        ORDER BY {sort} {order}) AS details
                                  LIMIT %(errors_limit)s OFFSET %(errors_offset)s
                                 ) AS details
                                     INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
                                                                MIN(timestamp) AS first_occurrence
                                                         FROM events.errors
                                                         WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
                                     INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
                                                         FROM (SELECT generated_timestamp AS timestamp,
                                                                      COUNT(session_id) AS count
                                                               FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                                                        LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                                           FROM events.errors
                                                                                           WHERE {" AND ".join(pg_sub_query_chart)}
                                                                            ) AS sessions ON (TRUE)
                                                               GROUP BY timestamp
                                                               ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")
        cur.execute(cur.mogrify(main_pg_query, params))
        rows = cur.fetchall()
        total = 0 if len(rows) == 0 else rows[0]["full_count"]
        if flows:
            return {"data": {"count": total}}
        if total == 0:
            rows = []
        else:
            if len(statuses) == 0:
                # No pre-search happened: fetch status/favorite/viewed flags now
                # for exactly the page of errors being returned.
                query = cur.mogrify(
                    """SELECT error_id, status, parent_error_id, payload,
                              COALESCE((SELECT TRUE
                                        FROM public.user_favorite_errors AS fe
                                        WHERE errors.error_id = fe.error_id
                                          AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
                              COALESCE((SELECT TRUE
                                        FROM public.user_viewed_errors AS ve
                                        WHERE errors.error_id = ve.error_id
                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
                       FROM public.errors
                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                     "user_id": user_id})
                cur.execute(query=query)
                statuses = helper.list_to_camel_case(cur.fetchall())
            # Index status rows by error id for O(1) enrichment below.
            statuses = {
                s["errorId"]: s for s in statuses
            }
        for r in rows:
            r.pop("full_count")
            if r["error_id"] in statuses:
                r["status"] = statuses[r["error_id"]]["status"]
                r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
                r["favorite"] = statuses[r["error_id"]]["favorite"]
                r["viewed"] = statuses[r["error_id"]]["viewed"]
                r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
            else:
                # Error present in events but with no tracked row in public.errors.
                r["status"] = "untracked"
                r["parent_error_id"] = None
                r["favorite"] = False
                r["viewed"] = False
                r["stack"] = None
        # Drop single-frame cross-origin "Script error." rows that carry no usable
        # source location; `offset` ends up as the number of rows removed so the
        # reported total stays consistent with the filtered list.
        offset = len(rows)
        rows = [r for r in rows if r["stack"] is None
                or (len(r["stack"]) == 0 or len(r["stack"]) > 1
                    or len(r["stack"]) > 0
                    and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
        offset -= len(rows)
    return {
        "data": {
            'total': total - offset,
            'errors': helper.list_to_camel_case(rows)
        }
    }
# refactor this function after clickhouse structure changes (missing search by query)
def search_deprecated(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
@ -460,17 +653,19 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
ch_sub_query.append("source ='js_exception'")
statuses = []
error_ids = None
if data.startDate is None:
# Clickhouse keeps data for the past month only, so no need to search beyond that
if data.startDate is None or data.startDate < TimeUTC.now(delta_days=-31):
data.startDate = TimeUTC.now(-30)
if data.endDate is None:
data.endDate = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
if len(data.events) > 0 or len(data.filters) > 0 or data.status != schemas.ErrorStatus.all:
print("-- searching for sessions before errors")
# if favorite_only=True search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["error_id"] for e in statuses]
error_ids = [e["errorId"] for e in statuses]
with ch_client.ClickHouseClient() as ch, pg_client.PostgresClient() as cur:
if data.startDate is None:
data.startDate = TimeUTC.now(-7)
@ -495,7 +690,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if favorite_only:
if data.bookmarked:
cur.execute(cur.mogrify(f"""SELECT error_id
FROM public.user_favorite_errors
WHERE user_id = %(userId)s
@ -571,15 +766,15 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
"userId": user_id})
cur.execute(query=query)
statuses = cur.fetchall()
statuses = helper.list_to_camel_case(cur.fetchall())
statuses = {
s["error_id"]: s for s in statuses
s["errorId"]: s for s in statuses
}
for r in rows:
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
r["parent_error_id"] = statuses[r["error_id"]]["parentErrorId"]
r["favorite"] = statuses[r["error_id"]]["favorite"]
r["viewed"] = statuses[r["error_id"]]["viewed"]
r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]

View file

@ -7,7 +7,7 @@ def get_by_session_id(session_id):
with ch_client.ClickHouseClient() as ch:
ch_query = """\
SELECT
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s);"""
params = {"session_id": session_id}

View file

@ -41,6 +41,11 @@ def login(data: schemas.UserLoginSchema = Body(...)):
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
if "errors" in r:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=r["errors"][0]
)
tenant_id = r.pop("tenantId")

View file

@ -9,9 +9,9 @@ import (
"github.com/pkg/errors"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/queue/types"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
)
type Message = kafka.Message
@ -19,7 +19,7 @@ type Message = kafka.Message
type Consumer struct {
c *kafka.Consumer
messageHandler types.MessageHandler
commitTicker *time.Ticker
commitTicker *time.Ticker
pollTimeout uint
lastKafkaEventTs int64
@ -56,7 +56,7 @@ func NewConsumer(group string, topics []string, messageHandler types.MessageHand
return &Consumer{
c: c,
messageHandler: messageHandler,
commitTicker: time.NewTicker(2 * time.Minute),
commitTicker: time.NewTicker(2 * time.Minute),
pollTimeout: 200,
}
}
@ -65,13 +65,12 @@ func (consumer *Consumer) DisableAutoCommit() {
consumer.commitTicker.Stop()
}
func (consumer *Consumer) Commit() error {
consumer.c.Commit() // TODO: return error if it is not "No offset stored"
return nil
}
func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
assigned, err := consumer.c.Assignment()
if err != nil {
return err
@ -84,37 +83,38 @@ func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
timestamps = append(timestamps, p)
}
offsets, err := consumer.c.OffsetsForTimes(timestamps, 2000)
if err != nil {
if err != nil {
return errors.Wrap(err, "Kafka Consumer back commit error")
}
// Limiting to already committed
committed, err := consumer.c.Committed(assigned, 2000) // memorise?
logPartitions("Actually committed:",committed)
logPartitions("Actually committed:", committed)
if err != nil {
return errors.Wrap(err, "Kafka Consumer retrieving committed error")
}
for _, offs := range offsets {
for _, comm := range committed {
if comm.Offset == kafka.OffsetStored ||
if comm.Offset == kafka.OffsetStored ||
comm.Offset == kafka.OffsetInvalid ||
comm.Offset == kafka.OffsetBeginning ||
comm.Offset == kafka.OffsetEnd { continue }
if comm.Partition == offs.Partition &&
comm.Offset == kafka.OffsetBeginning ||
comm.Offset == kafka.OffsetEnd {
continue
}
if comm.Partition == offs.Partition &&
(comm.Topic != nil && offs.Topic != nil && *comm.Topic == *offs.Topic) &&
comm.Offset > offs.Offset {
comm.Offset > offs.Offset {
offs.Offset = comm.Offset
}
}
}
// TODO: check per-partition errors: offsets[i].Error
// TODO: check per-partition errors: offsets[i].Error
_, err = consumer.c.CommitOffsets(offsets)
return errors.Wrap(err, "Kafka Consumer back commit error")
}
func (consumer *Consumer) CommitBack(gap int64) error {
func (consumer *Consumer) CommitBack(gap int64) error {
if consumer.lastKafkaEventTs == 0 {
return nil
}
@ -135,31 +135,31 @@ func (consumer *Consumer) ConsumeNext() error {
}
switch e := ev.(type) {
case *kafka.Message:
if e.TopicPartition.Error != nil {
return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
}
ts := e.Timestamp.UnixNano()/ 1e6
consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
Topic: *(e.TopicPartition.Topic),
ID: uint64(e.TopicPartition.Offset),
Timestamp: ts,
})
consumer.lastKafkaEventTs = ts
// case kafka.AssignedPartitions:
// logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
// consumer.partitions = e.Partitions
// consumer.c.Assign(e.Partitions)
// log.Printf("Actually partitions assigned!")
// case kafka.RevokedPartitions:
// log.Println("Kafka Cosumer: Partitions Revoked")
// consumer.partitions = nil
// consumer.c.Unassign()
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown {
os.Exit(1)
}
log.Printf("Consumer error: %v\n", e)
case *kafka.Message:
if e.TopicPartition.Error != nil {
return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
}
ts := e.Timestamp.UnixNano() / 1e6
consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
Topic: *(e.TopicPartition.Topic),
ID: uint64(e.TopicPartition.Offset),
Timestamp: ts,
})
consumer.lastKafkaEventTs = ts
// case kafka.AssignedPartitions:
// logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
// consumer.partitions = e.Partitions
// consumer.c.Assign(e.Partitions)
// log.Printf("Actually partitions assigned!")
// case kafka.RevokedPartitions:
// log.Println("Kafka Cosumer: Partitions Revoked")
// consumer.partitions = nil
// consumer.c.Unassign()
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded {
os.Exit(1)
}
log.Printf("Consumer error: %v\n", e)
}
return nil
}
@ -173,8 +173,6 @@ func (consumer *Consumer) Close() {
}
}
// func (consumer *Consumer) consume(
// message func(m *kafka.Message) error,
// commit func(c *kafka.Consumer) error,
@ -230,7 +228,6 @@ func (consumer *Consumer) Close() {
// }
// }
// func (consumer *Consumer) Consume(
// message func(key uint64, value []byte) error,
// ) error {

View file

@ -0,0 +1,91 @@
\set ON_ERROR_STOP true
SET client_min_messages TO NOTICE;

BEGIN;

-- Stamp the schema with the release this migration brings it to.
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;

-- to detect duplicate users and delete them if possible
-- (duplicates = same email ignoring case; soft-deleted or never-logged-in
-- copies are removed automatically, any remainder aborts the migration so an
-- operator can resolve them by hand before the case-folding UPDATE below).
DO
$$
    DECLARE
        duplicate RECORD;
    BEGIN
        IF EXISTS(SELECT user_id
                  FROM users
                  WHERE lower(email) =
                        (SELECT LOWER(email)
                         FROM users AS su
                         WHERE LOWER(su.email) = LOWER(users.email)
                           AND su.user_id != users.user_id
                         LIMIT 1)
                  ORDER BY LOWER(email)) THEN
            raise notice 'duplicate users detected';
            FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat
                             FROM users
                             WHERE lower(email) =
                                   (SELECT LOWER(email)
                                    FROM users AS su
                                    WHERE LOWER(su.email) = LOWER(users.email)
                                      AND su.user_id != users.user_id
                                    LIMIT 1)
                             ORDER BY LOWER(email)
                LOOP
                    -- Safe to drop: soft-deleted, or no JWT issued (never signed in).
                    IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
                        raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
                        DELETE FROM users WHERE user_id = duplicate.user_id;
                    END IF;
                END LOOP;
            -- Re-check: anything still duplicated needs a human decision.
            IF EXISTS(SELECT user_id
                      FROM users
                      WHERE lower(email) =
                            (SELECT LOWER(email)
                             FROM users AS su
                             WHERE LOWER(su.email) = LOWER(users.email)
                               AND su.user_id != users.user_id
                             LIMIT 1)
                      ORDER BY LOWER(email)) THEN
                raise notice 'remaining duplicates, please fix (delete) before finishing update';
                FOR duplicate IN SELECT user_id, email
                                 FROM users
                                 WHERE lower(email) =
                                       (SELECT LOWER(email)
                                        FROM users AS su
                                        WHERE LOWER(su.email) = LOWER(users.email)
                                          AND su.user_id != users.user_id
                                        LIMIT 1)
                                 ORDER BY LOWER(email)
                    LOOP
                        raise notice 'user: % %',duplicate.user_id,duplicate.email;
                    END LOOP;
                -- 42710 = duplicate_object; rolls back the whole transaction.
                RAISE 'Duplicate users' USING ERRCODE = '42710';
            END IF;
        END IF;
    END;
$$
    LANGUAGE plpgsql;

-- Normalize all emails to lowercase (safe now that duplicates are resolved).
UPDATE users
SET email=LOWER(email);

-- Replaced below by per-type partial indexes, which are smaller and let the
-- planner pick the right index for each autocomplete type.
DROP INDEX IF EXISTS autocomplete_value_gin_idx;

COMMIT;

-- NOTE: CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
-- hence these statements come after the COMMIT.
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3-ee'
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -721,7 +721,22 @@ $$
CREATE unique index IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
BEGIN
IF NOT EXISTS(SELECT *
@ -1018,7 +1033,7 @@ $$
CREATE INDEX IF NOT EXISTS graphql_request_body_nn_gin_idx ON events.graphql USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_idx ON events.graphql (response_body) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS graphql_response_body_nn_gin_idx ON events.graphql USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE TABLE IF NOT EXISTS events.state_actions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,

View file

@ -3,7 +3,7 @@ var {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers
var express = require('express');
const {ExpressPeerServer} = require('peer');
var socket;
if (process.env.cluster === "true") {
if (process.env.redis === "true") {
console.log("Using Redis");
socket = require("./servers/websocket-cluster");
} else {

View file

@ -5,8 +5,7 @@ const geoip2Reader = require('@maxmind/geoip2-node').Reader;
const {extractPeerId} = require('./peerjs-server');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
var wsRouter = express.Router();
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@ -15,14 +14,37 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
let io;
const debug = process.env.debug === "1" || false;
const REDIS_URL = process.env.REDIS_URL || "redis://localhost:6379";
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
// Initialize the module-level socket.io server, either attached to the given
// HTTP server (default) or as a standalone instance attached to a uWS app
// when process.env.uws === "true". `prefix` is prepended to the socket path.
const createSocketIOServer = function (server, prefix) {
    const options = {
        maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
        cors: {
            origin: "*",
            methods: ["GET", "POST", "PUT"]
        },
        path: (prefix ? prefix : '') + '/socket'
    };
    if (process.env.uws !== "true") {
        io = _io(server, options);
    } else {
        io = new _io.Server(options);
        io.attachApp(server);
    }
}
const uniqueSessions = function (data) {
let resArr = [];
@ -36,18 +58,40 @@ const uniqueSessions = function (data) {
return resArr;
}
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
const extractUserIdFromRequest = function (req) {
if (process.env.uws === "true") {
if (req.getQuery("userId")) {
debug && console.log(`[WS]where userId=${req.getQuery("userId")}`);
return req.getQuery("userId");
}
} else if (req.query.userId) {
debug && console.log(`[WS]where userId=${req.query.userId}`);
return req.query.userId;
}
let result = {"data": liveSessions};
return undefined;
}
// Read the projectKey route parameter from either a uWS request
// (positional: getParameter(0)) or an Express request (named: params.projectKey).
// Returns undefined when the parameter is missing/empty.
const extractProjectKeyFromRequest = function (req) {
    const isUws = process.env.uws === "true";
    const projectKey = isUws ? req.getParameter(0) : req.params.projectKey;
    if (projectKey) {
        debug && console.log(`[WS]where projectKey=${projectKey}`);
        return projectKey;
    }
    return undefined;
}
// List all socket.io rooms known to the adapter (spans every node when the
// Redis adapter is in use).
const getAvailableRooms = async function () {
    return await io.of('/').adapter.allRooms();
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@ -56,37 +100,64 @@ const socketsList = async function (req, res) {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
}
// HTTP handler: return {projectKey: [sessionId, ...]} for every connected
// session room. With a ?userId= query parameter, only sessions whose tracker
// reported that userID are included (their project keys still appear, possibly
// with empty lists).
const socketsList = async function (req, res) {
    debug && console.log("[WS]looking for all available sessions");
    const filterUserId = extractUserIdFromRequest(req);
    const liveSessions = {};
    const rooms = await getAvailableRooms();
    for (const roomId of rooms) {
        const {projectKey, sessionId} = extractPeerId(roomId);
        if (projectKey === undefined) {
            continue; // room name not in projectKey-sessionId form
        }
        liveSessions[projectKey] = liveSessions[projectKey] || [];
        if (!filterUserId) {
            liveSessions[projectKey].push(sessionId);
            continue;
        }
        // Filtering by user requires inspecting each socket's handshake info.
        const roomSockets = await io.in(roomId).fetchSockets();
        for (const sock of roomSockets) {
            if (sock.handshake.query.identity === IDENTITIES.session
                && sock.handshake.query.sessionInfo
                && sock.handshake.query.sessionInfo.userID === filterUserId) {
                liveSessions[projectKey].push(sessionId);
            }
        }
    }
    respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
@ -94,51 +165,48 @@ const socketsLive = async function (req, res) {
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]);
}
}
let result = {"data": liveSessions};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
const socketsLiveByProject = async function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
let rooms = await io.of('/').adapter.allRooms();
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) {
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
liveSessions[projectKey] = uniqueSessions(liveSessions[projectKey]);
liveSessions[projectKey] = uniqueSessions(liveSessions[_projectKey]);
}
}
let result = {"data": liveSessions[req.params.projectKey] || []};
if (process.env.uws !== "true") {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify(result));
} else {
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
}
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@ -219,35 +287,13 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
socket.identity = socket.handshake.query.identity;
let {projectKey, sessionId} = extractPeerId(socket.peerId);
const {projectKey, sessionId} = extractPeerId(socket.peerId);
socket.sessionId = sessionId;
socket.projectKey = projectKey;
socket.lastMessageReceivedAt = Date.now();

View file

@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
var wsRouter = express.Router();
const {extractPeerId} = require('./peerjs-server');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@ -12,22 +12,68 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
let debug = process.env.debug === "1" || false;
const debug = process.env.debug === "1" || false;
const socketsList = function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId);
}
// Build the module-level socket.io server instance (`io`).
// Under Express (default) the server is attached classically; when the `uws`
// env flag is "true" a uWebSockets.js app is attached via attachApp().
// `prefix` is prepended to the '/socket' engine.io path so the service can
// run behind a path-routed reverse proxy.
const createSocketIOServer = function (server, prefix) {
    if (process.env.uws !== "true") {
        io = _io(server, {
            // payload cap in MB from env, default 5 MB
            maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
            cors: {
                origin: "*",
                methods: ["GET", "POST", "PUT"]
            },
            path: (prefix ? prefix : '') + '/socket'
        });
    } else {
        io = new _io.Server({
            maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
            cors: {
                origin: "*",
                methods: ["GET", "POST", "PUT"]
            },
            path: (prefix ? prefix : '') + '/socket'
            // transports: ['websocket'],
            // upgrade: false
        });
        io.attachApp(server);
    }
    // NOTE(review): merge residue — `liveSessions` is not defined in this
    // function; the next line looks like a leftover from the old socketsList
    // handler and should be deleted when the merge is resolved.
    let result = {"data": liveSessions};
}
// Pull the optional "userId" query parameter from the request, supporting
// both server modes: uWebSockets.js (req.getQuery) when the `uws` env flag
// is "true", Express (req.query) otherwise.
// Returns the value when present and truthy, undefined otherwise.
const extractUserIdFromRequest = function (req) {
    const usingUWS = process.env.uws === "true";
    const userId = usingUWS ? req.getQuery("userId") : req.query.userId;
    if (userId) {
        debug && console.log(`[WS]where userId=${userId}`);
        return userId;
    }
    return undefined;
}
// Resolve the ":projectKey" route parameter from the request, supporting
// both server modes: uWebSockets.js positional params (getParameter(0))
// when the `uws` env flag is "true", Express named params otherwise.
// Returns the value when present and truthy, undefined otherwise.
const extractProjectKeyFromRequest = function (req) {
    const projectKey = process.env.uws === "true"
        ? req.getParameter(0)
        : req.params.projectKey;
    if (projectKey) {
        debug && console.log(`[WS]where projectKey=${projectKey}`);
        return projectKey;
    }
    return undefined;
}
// Return an iterator over all currently existing socket.io room names.
// NOTE(review): declared async although this adapter lookup is synchronous —
// presumably to keep the signature compatible with async (cluster) adapters;
// confirm before relying on it elsewhere.
const getAvailableRooms = async function () {
    return io.sockets.adapter.rooms.keys();
}
// Send `data` as a JSON envelope {data: ...} with HTTP 200, using the
// Express response API by default or the uWebSockets.js API when the
// `uws` env flag is "true".
const respond = function (res, data) {
    let result = {data}
    if (process.env.uws !== "true") {
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
// NOTE(review): the raw diff hunk header below swallowed the Express
// res.end(JSON.stringify(result)) line and the `} else {` — restore them
// when resolving the merge.
@ -36,84 +82,111 @@ const socketsList = function (req, res) {
        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
    }
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
const socketsListByProject = function (req, res) {
if (process.env.uws === "true") {
req.params = {projectKey: req.getParameter(0)};
}
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
// HTTP handler: list live session ids for every project, keyed by projectKey
// (optional ?userId= filter against the session socket's sessionInfo.userID).
// NOTE(review): this span contains interleaved pre/post-merge lines (two loop
// headers, two conditions, an unconditional push, a stray per-project response
// block); braces do not balance — resolve by keeping the getAvailableRooms /
// userId-filter / respond() version.
const socketsList = async function (req, res) {
    debug && console.log("[WS]looking for all available sessions");
    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
    // NOTE(review): old loop header, superseded by the two lines below
    for (let peerId of io.sockets.adapter.rooms.keys()) {
    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
        // NOTE(review): old condition from the by-project variant — this
        // route has no :projectKey param; superseded by the check below
        if (projectKey === req.params.projectKey) {
        if (projectKey !== undefined) {
            liveSessions[projectKey] = liveSessions[projectKey] || [];
            // NOTE(review): old unconditional push, superseded by the
            // userId-filtered branch below
            liveSessions[projectKey].push(sessionId);
            if (userId) {
                const connected_sockets = await io.in(peerId).fetchSockets();
                for (let item of connected_sockets) {
                    if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
                        liveSessions[projectKey].push(sessionId);
                    }
                }
            } else {
                liveSessions[projectKey].push(sessionId);
            }
        }
    }
    // NOTE(review): stray response block from the by-project variant,
    // superseded by respond() below
    let result = {"data": liveSessions[req.params.projectKey] || []};
    if (process.env.uws !== "true") {
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
        respond(res, liveSessions);
    }
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
// HTTP handler: list the session ids currently connected for one project.
// Route: GET /:S3_KEY/sockets-list/:projectKey  (optional ?userId= filter).
// Responds via respond() with a JSON array of session ids (empty when none).
const socketsListByProject = async function (req, res) {
    debug && console.log("[WS]looking for available sessions");
    let _projectKey = extractProjectKeyFromRequest(req);
    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
        if (projectKey === _projectKey) {
            liveSessions[projectKey] = liveSessions[projectKey] || [];
            if (userId) {
                // Keep the session only if one of its session-side sockets
                // belongs to the requested user.
                const connected_sockets = await io.in(peerId).fetchSockets();
                for (let item of connected_sockets) {
                    if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
                        liveSessions[projectKey].push(sessionId);
                        // Fix: push each sessionId at most once — a room can
                        // hold several matching session sockets (e.g. after a
                        // reconnect), which previously produced duplicate ids
                        // in the response.
                        break;
                    }
                }
            } else {
                liveSessions[projectKey].push(sessionId);
            }
        }
    }
    respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
// HTTP handler: return the full sessionInfo objects of every live session,
// grouped by projectKey (optional ?userId= filter).
// NOTE(review): this span contains interleaved pre/post-merge lines (two loop
// headers, an unconditional push, a duplicated response section); braces do
// not balance — resolve by keeping the getAvailableRooms / userId-filter /
// respond() version.
const socketsLive = async function (req, res) {
    debug && console.log("[WS]looking for all available LIVE sessions");
    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
    // NOTE(review): old loop header, superseded by the two lines below
    for (let peerId of io.sockets.adapter.rooms.keys()) {
    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
        if (projectKey !== undefined) {
            let connected_sockets = await io.in(peerId).fetchSockets();
            for (let item of connected_sockets) {
                if (item.handshake.query.identity === IDENTITIES.session) {
                    liveSessions[projectKey] = liveSessions[projectKey] || [];
                    // NOTE(review): old unconditional push, superseded by the
                    // userId-filtered branch below
                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                    if (userId) {
                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
                            liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                        }
                    } else {
                        liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                    }
                }
            }
        }
    }
    // NOTE(review): old inline response block, superseded by respond() below
    let result = {"data": liveSessions};
    if (process.env.uws !== "true") {
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
    }
    respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
// HTTP handler: return sessionInfo objects for one project's live sessions.
// Route: GET /:S3_KEY/sockets-live/:projectKey  (optional ?userId= filter).
// NOTE(review): interleaved pre/post-merge lines below (old uWS param shim,
// two debug lines, two conditions, an unconditional push, a duplicated
// response section); resolve by keeping the extract*FromRequest /
// getAvailableRooms / respond() version.
const socketsLiveByProject = async function (req, res) {
    // NOTE(review): old uWS parameter shim, superseded by
    // extractProjectKeyFromRequest below
    if (process.env.uws === "true") {
        req.params = {projectKey: req.getParameter(0)};
    }
    debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
    debug && console.log("[WS]looking for available LIVE sessions");
    let _projectKey = extractProjectKeyFromRequest(req);
    let userId = extractUserIdFromRequest(req);
    let liveSessions = {};
    // NOTE(review): old loop header, superseded by the two lines below
    for (let peerId of io.sockets.adapter.rooms.keys()) {
    let rooms = await getAvailableRooms();
    for (let peerId of rooms) {
        let {projectKey, sessionId} = extractPeerId(peerId);
        // NOTE(review): old condition, superseded by the _projectKey check
        if (projectKey === req.params.projectKey) {
        if (projectKey === _projectKey) {
            let connected_sockets = await io.in(peerId).fetchSockets();
            for (let item of connected_sockets) {
                if (item.handshake.query.identity === IDENTITIES.session) {
                    liveSessions[projectKey] = liveSessions[projectKey] || [];
                    // NOTE(review): old unconditional push, superseded below
                    liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                    if (userId) {
                        if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
                            liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                        }
                    } else {
                        liveSessions[projectKey].push(item.handshake.query.sessionInfo);
                    }
                }
            }
        }
    }
    // NOTE(review): old inline response block, superseded by respond() below
    let result = {"data": liveSessions[req.params.projectKey] || []};
    if (process.env.uws !== "true") {
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
    }
    respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@ -192,29 +265,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
if (process.env.uws !== "true") {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
} else {
io = new _io.Server({
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket',
// transports: ['websocket'],
// upgrade: false
});
io.attachApp(server);
}
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
@ -285,10 +337,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@ -302,7 +354,7 @@ module.exports = {
});
console.log("WS server started")
setInterval((io) => {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);

1
frontend/.gitignore vendored
View file

@ -8,3 +8,4 @@ app/components/ui/SVG.js
*.DS_Store
.env
*css.d.ts
*.cache

View file

@ -22,6 +22,7 @@ var timeoutId;
metaList: state.getIn(['customFields', 'list']).map(i => i.key),
currentPage: state.getIn([ 'search', 'currentPage' ]),
scrollY: state.getIn([ 'search', 'scrollY' ]),
lastPlayedSessionId: state.getIn([ 'sessions', 'lastPlayedSessionId' ]),
}), {
applyFilter,
addAttribute,
@ -87,6 +88,7 @@ export default class SessionList extends React.PureComponent {
metaList,
currentPage,
total,
lastPlayedSessionId,
} = this.props;
const _filterKeys = filters.map(i => i.key);
const hasUserFilter = _filterKeys.includes(FilterKey.USERID) || _filterKeys.includes(FilterKey.USERANONYMOUSID);
@ -122,6 +124,7 @@ export default class SessionList extends React.PureComponent {
hasUserFilter={hasUserFilter}
onUserClick={this.onUserClick}
metaList={metaList}
lastPlayedSessionId={lastPlayedSessionId}
/>
))}
</Loader>

View file

@ -8,14 +8,14 @@ const DOCUMENTATION = 'NPM';
// const SEGMENT = 'SEGMENT';
// const GOOGLE_TAG = 'GOOGLE TAG';
const TABS = [
{ key: PROJECT, text: PROJECT },
{ key: DOCUMENTATION, text: DOCUMENTATION },
{ key: PROJECT, text: PROJECT },
// { key: SEGMENT, text: SEGMENT },
// { key: GOOGLE_TAG, text: GOOGLE_TAG }
];
class TrackingCodeModal extends React.PureComponent {
state = { copied: false, changed: false, activeTab: PROJECT };
state = { copied: false, changed: false, activeTab: DOCUMENTATION };
setActiveTab = (tab) => {
this.setState({ activeTab: tab });

View file

@ -9,6 +9,7 @@ import Controls from './Controls';
import Overlay from './Overlay';
import stl from './player.css';
import EventsToggleButton from '../../Session/EventsToggleButton';
import { updateLastPlayedSession } from 'Duck/sessions';
@connectPlayer(state => ({
live: state.live,
@ -18,16 +19,19 @@ import EventsToggleButton from '../../Session/EventsToggleButton';
return {
fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]),
nextId: state.getIn([ 'sessions', 'nextId' ]),
sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]),
closedLive: !!state.getIn([ 'sessions', 'errors' ]) || (isAssist && !state.getIn([ 'sessions', 'current', 'live' ])),
}
}, {
hideTargetDefiner,
fullscreenOff,
updateLastPlayedSession,
})
export default class Player extends React.PureComponent {
screenWrapper = React.createRef();
componentDidMount() {
this.props.updateLastPlayedSession(this.props.sessionId);
if (this.props.closedLive) return;
const parentElement = findDOMNode(this.screenWrapper.current); //TODO: good architecture

View file

@ -36,7 +36,7 @@ function FilterSeries(props: Props) {
const onAddFilter = (filter) => {
filter.value = [""]
if (filter.hasOwnProperty('filters')) {
if (filter.hasOwnProperty('filters') && Array.isArray(filter.filters)) {
filter.filters = filter.filters.map(i => ({ ...i, value: [""] }))
}
props.addSeriesFilterFilter(seriesIndex, filter);

View file

@ -3,29 +3,25 @@ import cn from 'classnames';
import {
Link,
Icon,
OsIcon,
BrowserIcon,
CountryFlag,
Avatar,
TextEllipsis,
Label,
} from 'UI';
import { deviceTypeIcon } from 'App/iconNames';
import { toggleFavorite, setSessionPath } from 'Duck/sessions';
import { session as sessionRoute, liveSession as liveSessionRoute, withSiteId } from 'App/routes';
import { durationFormatted, formatTimeOrDate } from 'App/date';
import stl from './sessionItem.css';
import LiveTag from 'Shared/LiveTag';
import Bookmark from 'Shared/Bookmark';
import Counter from './Counter'
import { withRouter } from 'react-router-dom';
import SessionMetaList from './SessionMetaList';
import ErrorBars from './ErrorBars';
import { assist as assistRoute, liveSession, isRoute } from "App/routes";
import { assist as assistRoute, liveSession, sessions as sessionsRoute, isRoute } from "App/routes";
import { capitalize } from 'App/utils';
const ASSIST_ROUTE = assistRoute();
const ASSIST_LIVE_SESSION = liveSession()
const SESSIONS_ROUTE = sessionsRoute();
// const Label = ({ label = '', color = 'color-gray-medium'}) => (
// <div className={ cn('font-light text-sm', color)}>{label}</div>
@ -69,10 +65,13 @@ export default class SessionItem extends React.PureComponent {
disableUser = false,
metaList = [],
showActive = false,
lastPlayedSessionId,
} = this.props;
const formattedDuration = durationFormatted(duration);
const hasUserId = userId || userAnonymousId;
const isSessions = isRoute(SESSIONS_ROUTE, this.props.location.pathname);
const isAssist = isRoute(ASSIST_ROUTE, this.props.location.pathname) || isRoute(ASSIST_LIVE_SESSION, this.props.location.pathname);
const isLastPlayed = lastPlayedSessionId === sessionId;
const _metaList = Object.keys(metadata).filter(i => metaList.includes(i)).map(key => {
const value = metadata[key];
@ -125,7 +124,7 @@ export default class SessionItem extends React.PureComponent {
</span>
</div>
</div>
{ !isAssist && (
{ isSessions && (
<div style={{ width: "10%"}} className="self-center px-2 flex items-center">
<ErrorBars count={issueTypes.length} />
</div>
@ -139,6 +138,15 @@ export default class SessionItem extends React.PureComponent {
</Label>
)}
<div className={ stl.playLink } id="play-button" data-viewed={ viewed }>
{ isSessions && (
<div className="mr-4 flex-shrink-0 w-24">
{ isLastPlayed && (
<Label className="bg-gray-lightest p-1 px-2 rounded-lg">
<span className="color-gray-medium text-xs" style={{ whiteSpace: 'nowrap'}}>LAST PLAYED</span>
</Label>
)}
</div>
)}
<Link to={ isAssist ? liveSessionRoute(sessionId) : sessionRoute(sessionId) }>
<Icon name={ !viewed && !isAssist ? 'play-fill' : 'play-circle-light' } size="42" color={isAssist ? "tealx" : "teal"} />
</Link>

View file

@ -10,12 +10,12 @@ import cn from 'classnames';
const PROJECT = 'Using Script';
const DOCUMENTATION = 'Using NPM';
const TABS = [
{ key: DOCUMENTATION, text: DOCUMENTATION },
{ key: PROJECT, text: PROJECT },
{ key: DOCUMENTATION, text: DOCUMENTATION }
];
class TrackingCodeModal extends React.PureComponent {
state = { copied: false, changed: false, activeTab: PROJECT };
state = { copied: false, changed: false, activeTab: DOCUMENTATION };
setActiveTab = (tab) => {
this.setState({ activeTab: tab });

View file

@ -7,9 +7,9 @@ import withRequestState, { RequestTypes } from './requestStateCreator';
import { getRE } from 'App/utils';
import { LAST_7_DAYS } from 'Types/app/period';
import { getDateRangeFromValue } from 'App/dateRange';
const name = 'sessions';
const INIT = 'sessions/INIT';
const FETCH_LIST = new RequestTypes('sessions/FETCH_LIST');
const FETCH = new RequestTypes('sessions/FETCH');
const FETCH_FAVORITE_LIST = new RequestTypes('sessions/FETCH_FAVORITE_LIST');
@ -26,6 +26,7 @@ const TOGGLE_CHAT_WINDOW = 'sessions/TOGGLE_CHAT_WINDOW';
const SET_FUNNEL_PAGE_FLAG = 'sessions/SET_FUNNEL_PAGE_FLAG';
const SET_TIMELINE_POINTER = 'sessions/SET_TIMELINE_POINTER';
const SET_SESSION_PATH = 'sessions/SET_SESSION_PATH';
const LAST_PLAYED_SESSION_ID = `${name}/LAST_PLAYED_SESSION_ID`;
const SET_ACTIVE_TAB = 'sessions/SET_ACTIVE_TAB';
@ -60,6 +61,7 @@ const initialState = Map({
funnelPage: Map(),
timelinePointer: null,
sessionPath: '',
lastPlayedSessionId: null,
});
const reducer = (state = initialState, action = {}) => {
@ -248,11 +250,21 @@ const reducer = (state = initialState, action = {}) => {
return state.set('timelinePointer', action.pointer);
case SET_SESSION_PATH:
return state.set('sessionPath', action.path);
case LAST_PLAYED_SESSION_ID:
return updateListItem(state, action.sessionId, { viewed: true }).set('lastPlayedSessionId', action.sessionId);
default:
return state;
}
};
// Merge `instance` into the session identified by `sourceSessionId` inside
// the immutable 'list'; returns the state unchanged when the id is absent.
function updateListItem(state, sourceSessionId, instance) {
    const idx = state
        .get('list')
        .findIndex(({ sessionId }) => sessionId === sourceSessionId);
    return idx === -1
        ? state
        : state.updateIn([ 'list', idx ], session => session.merge(instance));
}
export default withRequestState({
_: [ FETCH, FETCH_LIST ],
fetchLiveListRequest: FETCH_LIVE_LIST,
@ -391,4 +403,11 @@ export function setSessionPath(path) {
type: SET_SESSION_PATH,
path
}
}
// Action creator for the LAST_PLAYED_SESSION_ID reducer case: records the
// id of the session most recently opened in the player.
export function updateLastPlayedSession(sessionId) {
    return { type: LAST_PLAYED_SESSION_ID, sessionId };
}

View file

@ -31,16 +31,6 @@ export default abstract class BaseScreen {
const screen = document.createElement('div');
setTimeout(function() {
iframe.contentDocument?.addEventListener('mousemove', function() {
overlay.style.display = 'block';
})
overlay.addEventListener('contextmenu', function() {
overlay.style.display = 'none';
})
}, 10)
screen.className = styles.screen;
screen.appendChild(iframe);
screen.appendChild(overlay);
@ -58,6 +48,20 @@ export default abstract class BaseScreen {
// parentElement.onresize = this.scale;
window.addEventListener('resize', this.scale);
this.scale();
/* == For the Inspecting Document content == */
this.overlay.addEventListener('contextmenu', () => {
this.overlay.style.display = 'none'
const doc = this.document
if (!doc) { return }
const returnOverlay = () => {
this.overlay.style.display = 'block'
doc.removeEventListener('mousemove', returnOverlay)
doc.removeEventListener('mouseclick', returnOverlay) // TODO: prevent default in case of input selection
}
doc.addEventListener('mousemove', returnOverlay)
doc.addEventListener('mouseclick', returnOverlay)
})
}
get window(): WindowProxy | null {
@ -70,10 +74,10 @@ export default abstract class BaseScreen {
private boundingRect: DOMRect | null = null;
private getBoundingClientRect(): DOMRect {
//if (this.boundingRect === null) {
return this.boundingRect = this.overlay.getBoundingClientRect(); // expensive operation?
//}
//return this.boundingRect;
if (this.boundingRect === null) {
return this.boundingRect = this.overlay.getBoundingClientRect() // expensive operation?
}
return this.boundingRect
}
getInternalViewportCoordinates({ x, y }: Point): Point {
@ -85,17 +89,22 @@ export default abstract class BaseScreen {
const screenX = (x - overlayX) * scale;
const screenY = (y - overlayY) * scale;
return { x: screenX, y: screenY };
return { x: Math.round(screenX), y: Math.round(screenY) };
}
// Current scroll offset of the inspected document's root element;
// falls back to (0, 0) when no document is attached.
getCurrentScroll(): Point {
    const root = this.document?.documentElement
    return {
        x: root ? root.scrollLeft : 0,
        y: root ? root.scrollTop : 0,
    }
}
// Translate a viewport-relative point into document coordinates by adding
// the current scroll offset.
// NOTE(review): merge residue — the scrollX/scrollY lines and the first
// `return` duplicate the getCurrentScroll()-based version below them; keep
// the latter when resolving (the first return makes the rest unreachable).
getInternalCoordinates(p: Point): Point {
    const { x, y } = this.getInternalViewportCoordinates(p);
    const docEl = this.document?.documentElement
    const scrollX = docEl ? docEl.scrollLeft : 0
    const scrollY = docEl ? docEl.scrollTop : 0
    const sc = this.getCurrentScroll()
    return { x: x+scrollX, y: y+scrollY };
    return { x: x+sc.x, y: y+sc.y };
}
getElementFromInternalPoint({ x, y }: Point): Element | null {

View file

@ -1,4 +1,5 @@
.screen {
user-select: none;
overflow: hidden;
position: absolute;
transform-origin: left top;

View file

@ -0,0 +1,85 @@
// Transparent fixed-position canvas laid over the assisted screen so the
// agent can draw red annotation strokes; strokes fade out automatically a
// few seconds after drawing stops.
export default class AnnotationCanvas {
  readonly canvas: HTMLCanvasElement
  private ctx: CanvasRenderingContext2D | null = null
  // True while a stroke is in progress (between start() and stop()).
  private painting: boolean = false

  constructor() {
    this.canvas = document.createElement('canvas')
    Object.assign(this.canvas.style, {
      position: "fixed",
      // Custom pen cursor (inline base64 PNG), crosshair as fallback.
      cursor: "url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAAXNSR0IArs4c6QAAAWNJREFUOE+l1D1Lw1AUBuD35Catg5NzaCMRMilINnGok7sguLg4OlRcBTd/hqBVB0ed7KDgIPgXhJoaG10Kgk4a83EkhcYYktimd733z31zzuESSqwGIDs1bRvAIiRcWrZ9ETFUwhJ6XTsDsPH7Le1bz08H42JkGMa09+W2CVhKBmHC7jhYlOgUTPdUEa3Q86+SIDN/j4olf43BtJMFjoJl1AgMUJMUcRInZHT+w7KgYakGoDxVafmue0hBsJeLmaapvPffziFhraDjDMKWZdvHRaNRlCi2mUNHYl55dBwrDysFZWGloTQ2EZTEJoZiTFXVmaos34Ixn9e5qNgCaHR6vW7emcFozNVmN1ERbfb9myww3bVCTK9rPsDrpCh37HnXAC3Ek5lqf9ErM0im1zUG8BmGtCqq4mEIjppoeEESA5g/JIkaLMuv7AVHEgfNohqlU/7Fol3mPodiufvS7Yz7cP4ARjbPWyYPZSMAAAAASUVORK5CYII=') 0 20, crosshair",
      left: 0,
      top: 0,
      //zIndex: 2147483647 - 2,
    })
  }

  isPainting() {
    return this.painting
  }

  // Keep the canvas bitmap in sync with the parent's size; a stale
  // width/height would stretch (blur) the strokes.
  private resizeCanvas = () => {
    if (!this.canvas.parentElement) { return }
    this.canvas.width = this.canvas.parentElement.offsetWidth
    this.canvas.height = this.canvas.parentElement.offsetHeight
  }

  private lastPosition: [number, number] = [0,0]
  // Begin a stroke at p; cancels any pending fade-out cleanup so a fresh
  // stroke is not wiped mid-draw.
  start = (p: [number, number]) => {
    this.painting = true
    this.clrTmID && clearTimeout(this.clrTmID)
    this.lastPosition = p
  }
  // End the current stroke (no-op when not painting) and start fading.
  stop = () => {
    if (!this.painting) { return }
    this.painting = false
    this.fadeOut()
  }
  // Draw a segment from the last recorded position to p (8px round red pen).
  move = (p: [number, number]) =>{
    if (!this.ctx || !this.painting) { return }
    this.ctx.globalAlpha = 1.0
    this.ctx.beginPath()
    this.ctx.moveTo(this.lastPosition[0], this.lastPosition[1])
    this.ctx.lineTo(p[0], p[1])
    this.ctx.lineWidth = 8
    this.ctx.lineCap = "round"
    this.ctx.lineJoin = "round"
    this.ctx.strokeStyle = "red"
    this.ctx.stroke()
    this.lastPosition = p
  }

  // Handle of the final clearRect timeout so start() can cancel it.
  clrTmID: ReturnType<typeof setTimeout> | null = null
  // Gradually erase the drawing: every 100ms punch 10% alpha out of the
  // whole canvas via 'destination-out', then fully clear after 3.7s.
  // The step loop stops itself as soon as a new stroke begins (painting).
  private fadeOut() {
    let timeoutID: ReturnType<typeof setTimeout>
    const fadeStep = () => {
      if (!this.ctx || this.painting ) { return }
      this.ctx.globalCompositeOperation = 'destination-out'
      this.ctx.fillStyle = "rgba(255, 255, 255, 0.1)"
      this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
      this.ctx.globalCompositeOperation = 'source-over'
      timeoutID = setTimeout(fadeStep,100)
    }
    this.clrTmID = setTimeout(() => {
      clearTimeout(timeoutID)
      this.ctx &&
      this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height)
    }, 3700)
    fadeStep()
  }

  // Attach the canvas under `parent` and track window resizes.
  mount(parent: HTMLElement) {
    parent.appendChild(this.canvas)
    this.ctx = this.canvas.getContext("2d")
    window.addEventListener("resize", this.resizeCanvas)
    this.resizeCanvas()
  }

  // Detach the canvas (if attached) and stop tracking resizes.
  remove() {
    if (this.canvas.parentNode){
      this.canvas.parentNode.removeChild(this.canvas)
    }
    window.removeEventListener("resize", this.resizeCanvas)
  }
}

View file

@ -7,8 +7,8 @@ import store from 'App/store';
import type { LocalStream } from './LocalStream';
import { update, getState } from '../../store';
import { iceServerConfigFromString } from 'App/utils'
import MStreamReader from '../messages/MStreamReader';;
import AnnotationCanvas from './AnnotationCanvas';
import MStreamReader from '../messages/MStreamReader';
import JSONRawMessageReader from '../messages/JSONRawMessageReader'
export enum CallingState {
@ -136,12 +136,14 @@ export default class AssistManager {
//socket.onAny((...args) => console.log(...args))
socket.on("connect", () => {
waitingForMessages = true
this.setStatus(ConnectionStatus.WaitingMessages)
this.setStatus(ConnectionStatus.WaitingMessages) // TODO: happens frequently on bad network
})
socket.on("disconnect", () => {
this.toggleRemoteControl(false)
update({ calling: CallingState.NoCall })
})
socket.on('messages', messages => {
//console.log(messages.filter(m => m._id === 41 || m._id === 44))
showDisconnectTimeout && clearTimeout(showDisconnectTimeout);
jmr.append(messages) // as RawMessage[]
@ -173,14 +175,15 @@ export default class AssistManager {
this.setStatus(ConnectionStatus.Disconnected)
}, 30000)
if (getState().remoteControl === RemoteControlStatus.Requesting ||
getState().remoteControl === RemoteControlStatus.Enabled) {
this.toggleRemoteControl(false)
if (getState().remoteControl === RemoteControlStatus.Requesting) {
this.toggleRemoteControl(false) // else its remaining
}
// Call State
if (getState().calling === CallingState.OnCall) {
update({ calling: CallingState.Reconnecting })
} else if (getState().calling === CallingState.Requesting){
update({ calling: CallingState.NoCall })
}
})
socket.on('error', e => {
@ -200,7 +203,7 @@ export default class AssistManager {
private onMouseMove = (e: MouseEvent): void => {
if (!this.socket) { return }
const data = this.md.getInternalCoordinates(e)
this.socket.emit("move", [ Math.round(data.x), Math.round(data.y) ])
this.socket.emit("move", [ data.x, data.y ])
}
private onWheel = (e: WheelEvent): void => {
@ -213,15 +216,23 @@ export default class AssistManager {
private onMouseClick = (e: MouseEvent): void => {
if (!this.socket) { return; }
const data = this.md.getInternalViewportCoordinates(e);
const data = this.md.getInternalViewportCoordinates(e)
// const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager
const el = this.md.getElementFromInternalPoint(data)
if (el instanceof HTMLElement) {
el.focus()
el.oninput = e => e.preventDefault();
el.onkeydown = e => e.preventDefault();
el.oninput = e => {
if (el instanceof HTMLTextAreaElement
|| el instanceof HTMLInputElement
) {
this.socket && this.socket.emit("input", el.value)
} else if (el.isContentEditable) {
this.socket && this.socket.emit("input", el.innerText)
}
}
//el.onkeydown = e => e.preventDefault()
}
this.socket.emit("click", [ Math.round(data.x), Math.round(data.y) ]);
this.socket.emit("click", [ data.x, data.y ]);
}
private toggleRemoteControl(newState: boolean){
@ -310,6 +321,8 @@ export default class AssistManager {
this.callConnection && this.callConnection.close()
update({ calling: CallingState.NoCall })
this.callArgs = null
this.annot?.remove()
this.annot = null
}
private initiateCallEnd = () => {
@ -355,6 +368,8 @@ export default class AssistManager {
}
}
private annot: AnnotationCanvas | null = null
private _call() {
if (![CallingState.NoCall, CallingState.Reconnecting].includes(getState().calling)) { return }
update({ calling: CallingState.Connecting })
@ -379,6 +394,34 @@ export default class AssistManager {
call.on('stream', stream => {
update({ calling: CallingState.OnCall })
this.callArgs && this.callArgs.onStream(stream)
if (!this.annot) {
const annot = this.annot = new AnnotationCanvas()
annot.mount(this.md.overlay)
annot.canvas.addEventListener("mousedown", e => {
if (!this.socket) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.start([ data.x, data.y ])
this.socket.emit("startAnnotation", [ data.x, data.y ])
})
annot.canvas.addEventListener("mouseleave", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mouseup", () => {
if (!this.socket) { return }
annot.stop()
this.socket.emit("stopAnnotation")
})
annot.canvas.addEventListener("mousemove", e => {
if (!this.socket || !annot.isPainting()) { return }
const data = this.md.getInternalViewportCoordinates(e)
annot.move([ data.x, data.y ])
this.socket.emit("moveAnnotation", [ data.x, data.y ])
})
}
});
//call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
@ -409,6 +452,10 @@ export default class AssistManager {
this.socket.close()
document.removeEventListener('visibilitychange', this.onVisChange)
}
if (this.annot) {
this.annot.remove()
this.annot = null
}
}
}

View file

@ -113,8 +113,15 @@ export default class DOMManager extends ListWalker<Message> {
logger.error("Node has no childNodes", this.nl[ parentID ]);
return;
}
if (this.nl[ id ] instanceof HTMLHtmlElement) {
// What if some exotic cases?
this.nl[ parentID ].replaceChild(this.nl[ id ], childNodes[childNodes.length-1])
return
}
this.nl[ parentID ]
.insertBefore(this.nl[ id ], childNodes[ index ]);
.insertBefore(this.nl[ id ], childNodes[ index ])
}
private applyMessage = (msg: Message): void => {
@ -257,14 +264,14 @@ export default class DOMManager extends ListWalker<Message> {
case "create_i_frame_document":
node = this.nl[ msg.frameID ];
// console.log('ifr', msg, node)
if (node instanceof HTMLIFrameElement) {
doc = node.contentDocument;
if (!doc) {
logger.warn("No iframe doc", msg, node, node.contentDocument);
return;
}
this.nl[ msg.id ] = doc.documentElement
this.nl[ msg.id ] = doc
return;
} else if (node instanceof Element) { // shadow DOM
try {

View file

@ -13,7 +13,7 @@ const oss = {
ORIGIN: () => 'window.location.origin',
API_EDP: () => 'window.location.origin + "/api"',
ASSETS_HOST: () => 'window.location.origin + "/assets"',
VERSION: '1.5.3',
VERSION: '1.5.4',
SOURCEMAP: true,
MINIO_ENDPOINT: process.env.MINIO_ENDPOINT,
MINIO_PORT: process.env.MINIO_PORT,
@ -21,7 +21,7 @@ const oss = {
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
ICE_SERVERS: process.env.ICE_SERVERS,
TRACKER_VERSION: '3.5.3' // trackerInfo.version,
TRACKER_VERSION: '3.5.4' // trackerInfo.version,
}
module.exports = {

View file

@ -0,0 +1,91 @@
\set ON_ERROR_STOP true
SET client_min_messages TO NOTICE;
BEGIN;
-- Stamp the schema with the release this migration brings it to;
-- openreplay_version() lets tooling detect the applied schema version.
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
-- to detect duplicate users and delete them if possible
DO
$$
    DECLARE
        duplicate RECORD;
    BEGIN
        -- Pass 1: find accounts whose email collides case-insensitively with
        -- another account's, and auto-delete the ones that are soft-deleted
        -- (deleted_at set) or never signed in (jwt_iat IS NULL).
        IF EXISTS(SELECT user_id
                  FROM users
                  WHERE lower(email) =
                        (SELECT LOWER(email)
                         FROM users AS su
                         WHERE LOWER(su.email) = LOWER(users.email)
                           AND su.user_id != users.user_id
                         LIMIT 1)
                  ORDER BY LOWER(email)) THEN
            raise notice 'duplicate users detected';
            FOR duplicate IN SELECT user_id, email, deleted_at, jwt_iat
                             FROM users
                             WHERE lower(email) =
                                   (SELECT LOWER(email)
                                    FROM users AS su
                                    WHERE LOWER(su.email) = LOWER(users.email)
                                      AND su.user_id != users.user_id
                                    LIMIT 1)
                             ORDER BY LOWER(email)
                LOOP
                    IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
                        raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
                        DELETE FROM users WHERE user_id = duplicate.user_id;
                    END IF;
                END LOOP;
            -- Pass 2: duplicates still left involve active accounts and need
            -- a human decision; list them and abort so the later
            -- UPDATE users SET email=LOWER(email) cannot create collisions.
            IF EXISTS(SELECT user_id
                      FROM users
                      WHERE lower(email) =
                            (SELECT LOWER(email)
                             FROM users AS su
                             WHERE LOWER(su.email) = LOWER(users.email)
                               AND su.user_id != users.user_id
                             LIMIT 1)
                      ORDER BY LOWER(email)) THEN
                raise notice 'remaining duplicates, please fix (delete) before finishing update';
                FOR duplicate IN SELECT user_id, email
                                 FROM users
                                 WHERE lower(email) =
                                       (SELECT LOWER(email)
                                        FROM users AS su
                                        WHERE LOWER(su.email) = LOWER(users.email)
                                          AND su.user_id != users.user_id
                                        LIMIT 1)
                                 ORDER BY LOWER(email)
                    LOOP
                        raise notice 'user: % %',duplicate.user_id,duplicate.email;
                    END LOOP;
                -- 42710 (duplicate_object) aborts the script under ON_ERROR_STOP.
                RAISE 'Duplicate users' USING ERRCODE = '42710';
            END IF;
        END IF;
    END;
$$
    LANGUAGE plpgsql;
-- Normalize all stored emails to lower case (safe: the DO block above
-- aborted the script if any unresolved case-insensitive duplicates remained).
UPDATE users
SET email=LOWER(email);

-- Replace the single catch-all trigram index with per-type partial indexes
-- (created below) so each autocomplete lookup scans a much smaller index.
DROP INDEX IF EXISTS autocomplete_value_gin_idx;

COMMIT;

-- CREATE INDEX CONCURRENTLY cannot run inside a transaction block, so these
-- statements must come after the COMMIT.
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';

View file

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3'
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@ -898,7 +898,23 @@ $$
CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';
CREATE INDEX autocomplete_value_graphqlonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'GRAPHQL';
CREATE INDEX autocomplete_value_inputonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'INPUT';
CREATE INDEX autocomplete_value_locationonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'LOCATION';
CREATE INDEX autocomplete_value_referreronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REFERRER';
CREATE INDEX autocomplete_value_requestonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REQUEST';
CREATE INDEX autocomplete_value_revidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'REVID';
CREATE INDEX autocomplete_value_stateactiononly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'STATEACTION';
CREATE INDEX autocomplete_value_useranonymousidonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERANONYMOUSID';
CREATE INDEX autocomplete_value_userbrowseronly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERBROWSER';
CREATE INDEX autocomplete_value_usercountryonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERCOUNTRY';
CREATE INDEX autocomplete_value_userdeviceonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERDEVICE';
CREATE INDEX autocomplete_value_useridonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USERID';
CREATE INDEX autocomplete_value_userosonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'USEROS';
CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
CREATE TYPE job_action AS ENUM ('delete_user_data');

View file

@ -40,3 +40,7 @@ dependencies:
repository: file://charts/redis
version: 12.10.1
condition: redis.enabled
- name: minio
repository: file://charts/minio
version: 3.7.14
condition: minio.enabled

View file

@ -99,6 +99,9 @@ redis:
cpu: 100m
memory: 128Mi
minio:
enabled: true
postgresql:
# postgresqlPassword: asayerPostgres
fullnameOverride: postgresql

View file

@ -15,7 +15,7 @@ fatal()
exit 1
}
version="v1.5.3"
version="v1.5.4"
usr=`whoami`
# Installing k3s

View file

@ -22,4 +22,4 @@ version: 0.1.0
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
# Ref: https://github.com/helm/helm/issues/7858#issuecomment-608114589
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -82,7 +82,7 @@ data:
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $origin_forwarded_ip;
proxy_set_header X-Real-IP $origin_forwarded_ip;
proxy_pass http://utilities-pool;
proxy_pass http://utilities-openreplay.app.svc.cluster.local:9001;
}
location /assets/ {
rewrite ^/assets/(.*) /sessions-assets/$1 break;

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -21,4 +21,4 @@ version: 0.1.0
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.5.3"
AppVersion: "v1.5.4"

View file

@ -83,6 +83,9 @@ autoscaling:
env:
REDIS_URL: "redis://redis-master.db.svc.cluster.local:6379"
debug: 0
uws: false
redis: false
nodeSelector: {}

View file

@ -12,9 +12,7 @@ mc alias set minio http://minio.db.svc.cluster.local:9000 $MINIO_ACCESS_KEY $MIN
function init() {
echo "Initializing minio"
for bucket in ${buckets[*]}; do
mc mb minio/${bucket} || true
mc ilm import minio/${bucket} <<EOF
cat <<EOF > /tmp/lifecycle.json
{
"Rules": [
{
@ -27,13 +25,17 @@ mc ilm import minio/${bucket} <<EOF
]
}
EOF
for bucket in ${buckets[*]}; do
mc mb minio/${bucket} || true
mc ilm import minio/${bucket} < /tmp/lifecycle.json || true
done
# Creating frontend bucket
mc mb minio/frontend || true
mc policy set download minio/frontend
mc policy set download minio/sessions-assets
mc policy set download minio/static
mc policy set download minio/frontend || true
mc policy set download minio/sessions-assets || true
mc policy set download minio/static || true
curl -L https://github.com/openreplay/openreplay/releases/download/v${CHART_APP_VERSION}/frontend.tar.gz -O
tar -xf frontend.tar.gz
@ -53,4 +55,3 @@ case "$1" in
exit 1
;;
esac

View file

@ -17,13 +17,13 @@ function migrate() {
IFS=',' read -r -a migration_versions <<< "$1"
for version in ${migration_versions[*]}; do
echo "Migrating postgresql version $version"
psql -f ${pgdir}/${version}/${version}.sql
psql -f ${pgdir}/${version}/${version}.sql 2>&1
done
}
function init() {
echo "Initializing postgresql"
psql -f ${pgdir}/init_schema.sql
psql -f ${pgdir}/init_schema.sql 2>&1
}
# /bin/bash postgresql.sh migrate $migration_versions

View file

@ -1,22 +1 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
{{- range .paths }}
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
{{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "openreplay.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "openreplay.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "openreplay.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "openreplay.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}
OpenReplay Installation is complete. Follow along with the SSL configuration from [doc](https://docs.openreplay.com/deployment/deploy-aws#configuretls/ssl).

View file

@ -1,4 +1,4 @@
fromVersion: "v1.5.3"
fromVersion: "v1.5.4"
# Databases specific variables
postgresql: &postgres
# For generating passwords
@ -95,13 +95,6 @@ chalice:
# idp_name: ''
# idp_tenantKey: ''
utilities:
replicaCount: 1
env:
debug: 0
uws: false
cluster: false
# If you want to override something
# chartname:
# filedFrom chart/Values.yaml:

View file

@ -9,8 +9,15 @@
<!--CSS -->
<!-- <link href="css/styles.css" rel="stylesheet"> -->
<style>
body {
margin: 0;
padding: 0;
}
.text-uppercase {
text-transform: uppercase;
}
.connecting-message {
margin-top: 50%;
/* margin-top: 50%; */
font-size: 20px;
color: #aaa;
text-align: center;
@ -19,31 +26,90 @@
}
.status-connecting .connecting-message {
display: block;
/* display: block; */
}
.status-connecting .card {
display: none;
/* display: none; */
}
.card{
min-width: 324px;
width: 350px;
max-width: 800px;
/*min-height: 220px;*/
max-height: 450px;
/*resize: both;
overflow: auto;*/
font: 14px 'Roboto', sans-serif;
/* min-width: 324px; */
width: 300px;
/* max-width: 800px; */
/* border: solid thin #ccc; */
/* box-shadow: 0 0 10px #aaa; */
border: solid 4px rgba(0, 0, 0, 0.2);
border-radius: 3px;
}
.card-footers {
display: flex;
border-bottom: solid thin #CCC;
padding: 5px 5px;
justify-content: space-between;
}
.card-footers .assist-controls {
display: flex;
align-items: center;
}
.btn-danger {
background-color: #CC0000 !important;
color: white;
}
.btn-danger:hover {
background-color: #FF0000 !important;
color: white;
}
.btn {
padding: 5px 8px;
font-size: 14px;
border-radius: 3px;
background-color: transparent;
border: none;
cursor: pointer;
display: flex;
align-items: center;
}
.btn span {
margin-left: 10px;
}
.btn:hover {
filter: brightness(0.9);
}
.card .card-header{
cursor: move;
padding: 14px 18px;
display: flex;
justify-content: space-between;
border-bottom: solid thin #ccc;
}
#agent-name, #duration{
cursor:default;
}
#video-container {
background-color: rgb(90, 90, 90);
position: relative;
overflow: hidden;
/* width: 300px; */
}
#video-container video {
width: 100% !important;
height: auto;
object-fit: cover;
}
#local-stream, #remote-stream {
display:none;
/* display:none; */ /* TODO uncomment this line */
}
#video-container.remote #remote-stream {
display: block;
@ -57,20 +123,30 @@
#local-stream{
width: 35%;
/* top: 50%; */
/* left: 70%; */
position: absolute;
z-index: 99;
bottom: 5px;
right: 5px;
border: thin solid rgba(255,255,255, .3);
overflow: hidden;
}
#audio-btn {
margin-right: 10px;
}
#audio-btn .bi-mic {
fill: #CC0000;
}
#audio-btn .bi-mic-mute {
display:none;
}
#audio-btn:after {
text-transform: capitalize;
content: 'Mute'
/* text-transform: capitalize; */
color: #CC0000;
content: 'Mute';
padding-left: 5px;
}
#audio-btn.muted .bi-mic-mute {
display: inline-block;
@ -79,19 +155,26 @@
display:none;
}
#audio-btn.muted:after {
content: 'Unmute'
content: 'Unmute';
padding-left: 5px;
}
#video-btn .bi-camera-video {
fill: #CC0000;
}
#video-btn .bi-camera-video-off {
display:none;
}
#video-btn:after {
text-transform: capitalize;
content: 'Stop Video'
/* text-transform: capitalize; */
color: #CC0000;
content: 'Stop Video';
padding-left: 5px;
}
#video-btn.off:after {
content: 'Start Video'
content: 'Start Video';
padding-left: 5px;
}
#video-btn.off .bi-camera-video-off {
display: inline-block;
@ -100,16 +183,201 @@
display:none;
}
/* CHART */
#chat-card {
display: flex;
flex-direction: column;
font-size: 14px;
background-color: white;
}
#chat-card .chat-messages { display: none; }
#chat-card .chat-input { display: none; }
#chat-card .chat-header .arrow-state { transform: rotate(180deg); }
#chat-card.active .chat-messages { display: flex; }
#chat-card.active .chat-input { display: flex; }
#chat-card.active .chat-header .arrow-state { transform: rotate(0deg); }
#chat-card .chat-header {
border-bottom: solid thin #ccc;
padding: 8px 16px;
display: flex;
justify-content: space-between;
cursor: pointer;
}
#chat-card .chat-header .chat-title {
display: flex;
align-items: center;
}
#chat-card .chat-header .chat-title span {
margin-left: 6px;
}
#chat-card .chat-messages {
padding: 8px 16px;
overflow-y: auto;
height: 250px;
overflow-y: auto;
flex-direction: column;
justify-content: flex-end;
}
#chat-card .message-text {
padding: 8px 16px;
border-radius: 20px;
color: #666666;
margin-bottom: 2px;
}
#chat-card .message .message-text {
/* max-width: 70%; */
width: fit-content;
}
#chat-card .message {
margin-bottom: 15px;
}
#chat-card .chat-messages .message.left .message-text {
text-align: left;
background: #D7E2E2;
border-radius: 0px 30px 30px 30px;
}
#chat-card .message .message-user {
font-size: 12px;
font-weight: bold;
color: #999999;
}
#chat-card .message .message-time {
font-size: 12px;
color: #999999;
margin-left: 4px;
}
#chat-card .chat-messages .message.right {
margin-left: auto;
text-align: right;
}
#chat-card .chat-messages .message.right .message-text {
background: #E4E4E4;
box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.15);
border-radius: 30px 30px 0px 30px;
}
#chat-card .chat-input {
margin: 10px;
border-radius: 3px;
box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.15);
background-color: #DDDDDD;
position: relative;
}
#chat-card .chat-input .input {
width: 100%;
border: none;
border-radius: 0px;
padding: 8px 16px;
font-size: 16px;
color: #333;
background-color: transparent;
}
.send-btn {
width: 26px;
height: 26px;
background-color: #AAA;
position: absolute;
right: 5px;
top: 0;
bottom: 0;
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
margin: auto;
cursor: pointer;
}
.send-btn:hover {
background-color: #999;
}
.send-btn svg {
fill: #DDDDDD;
}
.confirm-window .title {
margin-bottom: 10px;
}
.confirm-window {
font: 14px 'Roboto', sans-serif;
padding: 20px;
background-color: #F3F3F3;
border-radius: 3px;
/* position: absolute; */
width: fit-content;
color: #666666;
display: none;
}
.confirm-window .actions {
background-color: white;
padding: 10px;
display: flex;
box-shadow: 0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1);
border-radius: 6px;
}
.btn-lg {
font-size: 14px;
padding: 10px 14px;
}
.btn-success {
background: rgba(0, 167, 47, 1);
color: white;
}
/* .btn-error:hover,
.btn-success:hover {
filter: brightness(0.9);
} */
.btn-error {
background: #FFE9E9;
/* border-color: #d43f3a; */
color: #CC0000;
}
</style>
<link href="css/bootstrap.min.css" rel="stylesheet">
</head>
<body>
<div id="remote-control-confirm" class="confirm-window">
<div class="title">The agent is requesting remote control</div>
<div class="actions">
<button class="text-uppercase btn btn-lg btn-success" style="margin-right: 10px">Grant remote access</button>
<button class="text-uppercase btn btn-lg btn-error">Reject</button>
</div>
</div>
<div id="call-confirm" class="confirm-window">
<div class="title">Answer the call so the agent can assist.</div>
<div class="actions">
<button class="text-uppercase btn btn-lg btn-success" style="margin-right: 10px">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-telephone" viewBox="0 0 16 16">
<path d="M3.654 1.328a.678.678 0 0 0-1.015-.063L1.605 2.3c-.483.484-.661 1.169-.45 1.77a17.568 17.568 0 0 0 4.168 6.608 17.569 17.569 0 0 0 6.608 4.168c.601.211 1.286.033 1.77-.45l1.034-1.034a.678.678 0 0 0-.063-1.015l-2.307-1.794a.678.678 0 0 0-.58-.122l-2.19.547a1.745 1.745 0 0 1-1.657-.459L5.482 8.062a1.745 1.745 0 0 1-.46-1.657l.548-2.19a.678.678 0 0 0-.122-.58L3.654 1.328zM1.884.511a1.745 1.745 0 0 1 2.612.163L6.29 2.98c.329.423.445.974.315 1.494l-.547 2.19a.678.678 0 0 0 .178.643l2.457 2.457a.678.678 0 0 0 .644.178l2.189-.547a1.745 1.745 0 0 1 1.494.315l2.306 1.794c.829.645.905 1.87.163 2.611l-1.034 1.034c-.74.74-1.846 1.065-2.877.702a18.634 18.634 0 0 1-7.01-4.42 18.634 18.634 0 0 1-4.42-7.009c-.362-1.03-.037-2.137.703-2.877L1.885.511z"/>
</svg>
<span>Answer</span>
</button>
<button class="text-uppercase btn btn-lg btn-error">Reject</button>
</div>
</div>
<section id="or-assist" class="status-connecting">
<div class="connecting-message"> Connecting... </div>
<div class="card border-dark shadow">
<div class="card shadow">
<div class="drag-area card-header d-flex justify-content-between">
<div class="user-info">
<span>Call with</span>
@ -123,44 +391,98 @@
</div>
<div id="video-container" class="card-body bg-dark p-0 d-flex align-items-center position-relative">
<div id="local-stream" class="ratio ratio-4x3 rounded m-0 p-0 shadow">
<p class="text-white m-auto text-center">Starting video...</p>
<!-- <p class="text-white m-auto text-center">Starting video...</p> -->
<video id="video-local" autoplay muted></video>
</div>
<div id="remote-stream" class="ratio ratio-4x3 m-0 p-0">
<p id="remote-stream-placeholder" class="text-white m-auto text-center">Starting video...</p>
<!-- <p id="remote-stream-placeholder" class="text-white m-auto text-center">Starting video...</p> -->
<video id="video-remote" autoplay></video>
</div>
</div>
<div class="card-footer bg-transparent d-flex justify-content-between">
<div class="card-footers">
<div class="assist-controls">
<a href="#" id="audio-btn" class="btn btn-light btn-sm text-uppercase me-2"><i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic" viewBox="0 0 16 16">
<!-- Add class .muted to #audio-btn when user mutes audio -->
<button
href="#"
id="audio-btn"
class="btn btn-light btn-sm text-uppercase me-2"
>
<i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" class="bi bi-mic" viewBox="0 0 16 16">
<path d="M3.5 6.5A.5.5 0 0 1 4 7v1a4 4 0 0 0 8 0V7a.5.5 0 0 1 1 0v1a5 5 0 0 1-4.5 4.975V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 .5-.5z"/>
<path d="M10 8a2 2 0 1 1-4 0V3a2 2 0 1 1 4 0v5zM8 0a3 3 0 0 0-3 3v5a3 3 0 0 0 6 0V3a3 3 0 0 0-3-3z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-mic-mute" viewBox="0 0 16 16">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" class="bi bi-mic-mute" viewBox="0 0 16 16">
<path d="M13 8c0 .564-.094 1.107-.266 1.613l-.814-.814A4.02 4.02 0 0 0 12 8V7a.5.5 0 0 1 1 0v1zm-5 4c.818 0 1.578-.245 2.212-.667l.718.719a4.973 4.973 0 0 1-2.43.923V15h3a.5.5 0 0 1 0 1h-7a.5.5 0 0 1 0-1h3v-2.025A5 5 0 0 1 3 8V7a.5.5 0 0 1 1 0v1a4 4 0 0 0 4 4zm3-9v4.879l-1-1V3a2 2 0 0 0-3.997-.118l-.845-.845A3.001 3.001 0 0 1 11 3z"/>
<path d="m9.486 10.607-.748-.748A2 2 0 0 1 6 8v-.878l-1-1V8a3 3 0 0 0 4.486 2.607zm-7.84-9.253 12 12 .708-.708-12-12-.708.708z"/>
</svg>
</i></a>
<!-- Add class .mute to #audio-btn when user mutes audio -->
<a href="#" id="video-btn" class="off btn btn-light btn-sm text-uppercase ms-2"><i >
</i>
</button>
<!--Add class .off to #video-btn when user stops video -->
<button
href="#"
id="video-btn"
class="btn btn-light btn-sm text-uppercase ms-2"
>
<i>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M0 5a2 2 0 0 1 2-2h7.5a2 2 0 0 1 1.983 1.738l3.11-1.382A1 1 0 0 1 16 4.269v7.462a1 1 0 0 1-1.406.913l-3.111-1.382A2 2 0 0 1 9.5 13H2a2 2 0 0 1-2-2V5zm11.5 5.175 3.5 1.556V4.269l-3.5 1.556v4.35zM2 4a1 1 0 0 0-1 1v6a1 1 0 0 0 1 1h7.5a1 1 0 0 0 1-1V5a1 1 0 0 0-1-1H2z"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-camera-video-off" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M10.961 12.365a1.99 1.99 0 0 0 .522-1.103l3.11 1.382A1 1 0 0 0 16 11.731V4.269a1 1 0 0 0-1.406-.913l-3.111 1.382A2 2 0 0 0 9.5 3H4.272l.714 1H9.5a1 1 0 0 1 1 1v6a1 1 0 0 1-.144.518l.605.847zM1.428 4.18A.999.999 0 0 0 1 5v6a1 1 0 0 0 1 1h5.014l.714 1H2a2 2 0 0 1-2-2V5c0-.675.334-1.272.847-1.634l.58.814zM15 11.73l-3.5-1.555v-4.35L15 4.269v7.462zm-4.407 3.56-10-14 .814-.58 10 14-.814.58z"/>
</svg>
</i>
</button>
</div>
<button id="end-call-btn" href="#" class="btn btn-danger btn-sm text-uppercase" style="margin-right: 8px;">End</button>
</div>
</i></a>
<!--Add class .off to #video-btn when user stops video -->
<!-- CHAT - add .active class to show the messages and input -->
<div id="chat-card" class="active">
<div class="chat-header">
<div class="chat-title">
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" fill="currentColor" class="bi bi-chat" viewBox="0 0 16 16">
<path d="M2.678 11.894a1 1 0 0 1 .287.801 10.97 10.97 0 0 1-.398 2c1.395-.323 2.247-.697 2.634-.893a1 1 0 0 1 .71-.074A8.06 8.06 0 0 0 8 14c3.996 0 7-2.807 7-6 0-3.192-3.004-6-7-6S1 4.808 1 8c0 1.468.617 2.83 1.678 3.894zm-.493 3.905a21.682 21.682 0 0 1-.713.129c-.2.032-.352-.176-.273-.362a9.68 9.68 0 0 0 .244-.637l.003-.01c.248-.72.45-1.548.524-2.319C.743 11.37 0 9.76 0 8c0-3.866 3.582-7 8-7s8 3.134 8 7-3.582 7-8 7a9.06 9.06 0 0 1-2.347-.306c-.52.263-1.639.742-3.468 1.105z"/>
</svg>
<span>Chat</span>
</div>
<div class="assist-end">
<a id="end-call-btn" href="#" class="btn btn-danger btn-sm text-uppercase">End</a>
<div>
<svg xmlns="http://www.w3.org/2000/svg" width="18" height="18" class="bi bi-chevron-up arrow-state" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M7.646 4.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1-.708.708L8 5.707l-5.646 5.647a.5.5 0 0 1-.708-.708l6-6z"/>
</svg>
</div>
</div>
<div class="chat-messages">
<div class="message left">
<div class="message-text"> Hey, did you get the key? </div>
<div>
<span class="message-user">Username</span>
<span class="message-time"> 00:00 </span>
</div>
</div>
<div class="message right">
<div class="message-text">
Oui, merci!
</div>
<div>
<span class="message-user">Username</span>
<span class="message-time">00:00</span>
</div>
</div>
</div>
<div class="chat-input">
<input type="text" class="input" placeholder="Type a message...">
<div class="send-btn">
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" class="bi bi-arrow-right-short" viewBox="0 0 16 16">
<path fill-rule="evenodd" d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z"/>
</svg>
</div>
</div>
</div>
</div>
</section>

View file

@ -1,16 +1,15 @@
{
"name": "@openreplay/tracker-assist",
"version": "3.5.3",
"version": "3.5.7",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@openreplay/tracker-assist",
"version": "3.5.3",
"version": "3.5.7",
"license": "MIT",
"dependencies": {
"csstype": "^3.0.10",
"npm-dragndrop": "^1.2.0",
"peerjs": "^1.3.2",
"socket.io-client": "^4.4.1"
},
@ -21,12 +20,12 @@
"typescript": "^4.6.0-dev.20211126"
},
"peerDependencies": {
"@openreplay/tracker": "^3.5.0"
"@openreplay/tracker": "^3.5.3"
}
},
"../tracker": {
"name": "@openreplay/tracker",
"version": "3.5.2",
"version": "3.5.4",
"dev": true,
"license": "MIT",
"dependencies": {
@ -3338,11 +3337,6 @@
"node": ">=0.10.0"
}
},
"node_modules/npm-dragndrop": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/npm-dragndrop/-/npm-dragndrop-1.2.0.tgz",
"integrity": "sha1-bgUkAP7Yay8eP0csU4EPkjcRu7U="
},
"node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
@ -6487,11 +6481,6 @@
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"dev": true
},
"npm-dragndrop": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/npm-dragndrop/-/npm-dragndrop-1.2.0.tgz",
"integrity": "sha1-bgUkAP7Yay8eP0csU4EPkjcRu7U="
},
"p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-assist",
"description": "Tracker plugin for screen assistance through the WebRTC",
"version": "3.5.5",
"version": "3.5.7",
"keywords": [
"WebRTC",
"assistance",
@ -25,12 +25,11 @@
},
"dependencies": {
"csstype": "^3.0.10",
"npm-dragndrop": "^1.2.0",
"peerjs": "^1.3.2",
"socket.io-client": "^4.4.1"
},
"peerDependencies": {
"@openreplay/tracker": "^3.5.0"
"@openreplay/tracker": "^3.5.3"
},
"devDependencies": {
"@openreplay/tracker": "file:../tracker",

View file

@ -0,0 +1,81 @@
export default class AnnotationCanvas {
private canvas: HTMLCanvasElement
private ctx: CanvasRenderingContext2D | null = null
private painting: boolean = false
constructor() {
this.canvas = document.createElement('canvas')
Object.assign(this.canvas.style, {
position: "fixed",
left: 0,
top: 0,
pointerEvents: "none",
zIndex: 2147483647 - 2,
})
}
private resizeCanvas = () => {
this.canvas.width = window.innerWidth
this.canvas.height = window.innerHeight
}
private lastPosition: [number, number] = [0,0]
start = (p: [number, number]) => {
this.painting = true
this.clrTmID && clearTimeout(this.clrTmID)
this.lastPosition = p
}
stop = () => {
if (!this.painting) { return }
this.painting = false
this.fadeOut()
}
move = (p: [number, number]) =>{
if (!this.ctx || !this.painting) { return }
this.ctx.globalAlpha = 1.0
this.ctx.beginPath()
this.ctx.moveTo(this.lastPosition[0], this.lastPosition[1])
this.ctx.lineTo(p[0], p[1])
this.ctx.lineWidth = 8
this.ctx.lineCap = "round"
this.ctx.lineJoin = "round"
this.ctx.strokeStyle = "red"
this.ctx.stroke()
this.lastPosition = p
}
clrTmID: ReturnType<typeof setTimeout> | null = null
private fadeOut() {
let timeoutID: ReturnType<typeof setTimeout>
const fadeStep = () => {
if (!this.ctx || this.painting ) { return }
this.ctx.globalCompositeOperation = 'destination-out'
this.ctx.fillStyle = "rgba(255, 255, 255, 0.1)"
this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
this.ctx.globalCompositeOperation = 'source-over'
timeoutID = setTimeout(fadeStep,100)
}
this.clrTmID = setTimeout(() => {
clearTimeout(timeoutID)
this.ctx &&
this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height)
}, 4000)
fadeStep()
}
mount() {
document.body.appendChild(this.canvas)
this.ctx = this.canvas.getContext("2d")
window.addEventListener("resize", this.resizeCanvas)
this.resizeCanvas()
}
remove() {
if (this.canvas.parentNode){
this.canvas.parentNode.removeChild(this.canvas)
}
window.removeEventListener("resize", this.resizeCanvas)
}
}

View file

@ -5,8 +5,9 @@ import type { Properties } from 'csstype';
import { App } from '@openreplay/tracker';
import RequestLocalStream from './LocalStream.js';
import Mouse from './Mouse.js';
import RemoteControl from './RemoteControl.js';
import CallWindow from './CallWindow.js';
import AnnotationCanvas from './AnnotationCanvas.js';
import ConfirmWindow, { callConfirmDefault, controlConfirmDefault } from './ConfirmWindow.js';
import type { Options as ConfirmOptions } from './ConfirmWindow.js';
@ -14,12 +15,12 @@ import type { Options as ConfirmOptions } from './ConfirmWindow.js';
//@ts-ignore peerjs hack for webpack5 (?!) TODO: ES/node modules;
Peer = Peer.default || Peer;
type BehinEndCallback = () => ((()=>{}) | void)
type StartEndCallback = () => ((()=>{}) | void)
export interface Options {
onAgentConnect: BehinEndCallback,
onCallStart: BehinEndCallback,
onRemoteControlStart: BehinEndCallback,
onAgentConnect: StartEndCallback,
onCallStart: StartEndCallback,
onRemoteControlStart: StartEndCallback,
session_calling_peer_key: string,
session_control_peer_key: string,
callConfirm: ConfirmOptions,
@ -39,8 +40,11 @@ enum CallingState {
};
// TODO typing????
type OptionalCallback = (()=>{}) | void
type Agent = {
onDisconnect: ((()=>{}) | void), // TODO: better types here
onDisconnect?: OptionalCallback,
onControlReleased?: OptionalCallback,
name?: string
//
}
@ -139,6 +143,34 @@ export default class Assist {
})
socket.onAny((...args) => app.debug.log("Socket:", ...args))
const remoteControl = new RemoteControl(
this.options,
id => {
this.agents[id].onControlReleased = this.options.onRemoteControlStart()
this.emit("control_granted", id)
},
id => {
const cb = this.agents[id].onControlReleased
delete this.agents[id].onControlReleased
typeof cb === "function" && cb()
this.emit("control_rejected", id)
},
)
// TODO: check incoming args
socket.on("request_control", remoteControl.requestControl)
socket.on("release_control", remoteControl.releaseControl)
socket.on("scroll", remoteControl.scroll)
socket.on("click", remoteControl.click)
socket.on("move", remoteControl.move)
socket.on("input", remoteControl.input)
let annot: AnnotationCanvas | null = null
socket.on("moveAnnotation", (_, p) => annot && annot.move(p)) // TODO: restrict by id
socket.on("startAnnotation", (_, p) => annot && annot.start(p))
socket.on("stopAnnotation", () => annot && annot.stop())
socket.on("NEW_AGENT", (id: string, info) => {
this.agents[id] = {
onDisconnect: this.options.onAgentConnect && this.options.onAgentConnect(),
@ -148,7 +180,7 @@ export default class Assist {
this.app.stop();
this.app.start().then(() => { this.assistDemandedRestart = false })
})
socket.on("AGENTS_CONNECTED", (ids) => {
socket.on("AGENTS_CONNECTED", (ids: string[]) => {
ids.forEach(id =>{
this.agents[id] = {
onDisconnect: this.options.onAgentConnect && this.options.onAgentConnect(),
@ -157,75 +189,24 @@ export default class Assist {
this.assistDemandedRestart = true
this.app.stop();
this.app.start().then(() => { this.assistDemandedRestart = false })
const storedControllingAgent = sessionStorage.getItem(this.options.session_control_peer_key)
if (storedControllingAgent !== null && ids.includes(storedControllingAgent)) {
grantControl(storedControllingAgent)
socket.emit("control_granted", storedControllingAgent)
} else {
sessionStorage.removeItem(this.options.session_control_peer_key)
}
remoteControl.reconnect(ids)
})
let confirmRC: ConfirmWindow | null = null
const mouse = new Mouse() // TODO: lazy init
let controllingAgent: string | null = null
const requestControl = (id: string) => {
if (controllingAgent !== null) {
socket.emit("control_rejected", id)
return
}
controllingAgent = id // TODO: more explicit pending state
confirmRC = new ConfirmWindow(controlConfirmDefault(this.options.controlConfirm))
confirmRC.mount().then(allowed => {
if (allowed) {
grantControl(id)
socket.emit("control_granted", id)
} else {
releaseControl()
socket.emit("control_rejected", id)
}
}).catch()
}
let onRemoteControlStop: (()=>void) | null = null
const grantControl = (id: string) => {
controllingAgent = id
mouse.mount()
onRemoteControlStop = this.options.onRemoteControlStart() || null
sessionStorage.setItem(this.options.session_control_peer_key, id)
}
const releaseControl = () => {
typeof onRemoteControlStop === 'function' && onRemoteControlStop()
onRemoteControlStop = null
confirmRC?.remove()
mouse.remove()
controllingAgent = null
sessionStorage.removeItem(this.options.session_control_peer_key)
}
socket.on("request_control", requestControl)
socket.on("release_control", (id: string) => {
if (controllingAgent !== id) { return }
releaseControl()
})
socket.on("scroll", (id, d) => { id === controllingAgent && mouse.scroll(d) })
socket.on("click", (id, xy) => { id === controllingAgent && mouse.click(xy) })
socket.on("move", (id, xy) => { id === controllingAgent && mouse.move(xy) })
let confirmCall:ConfirmWindow | null = null
socket.on("AGENT_DISCONNECTED", (id) => {
// @ts-ignore (wtf, typescript?!)
this.agents[id] && this.agents[id].onDisconnect != null && this.agents[id].onDisconnect()
delete this.agents[id]
controllingAgent === id && releaseControl()
remoteControl.releaseControl(id)
// close the call also
if (callingAgent === id) {
confirmCall?.remove()
this.onRemoteCallEnd()
}
// @ts-ignore (wtf, typescript?!)
this.agents[id] && this.agents[id].onDisconnect != null && this.agents[id].onDisconnect()
delete this.agents[id]
})
socket.on("NO_AGENT", () => {
this.agents = {}
@ -281,11 +262,20 @@ export default class Assist {
style: this.options.confirmStyle,
}))
confirmAnswer = confirmCall.mount()
this.playNotificationSound()
this.onRemoteCallEnd = () => { // if call cancelled by a caller before confirmation
app.debug.log("Received call_end during confirm window opened")
confirmCall?.remove()
setCallingState(CallingState.False)
call.close()
}
setTimeout(() => {
if (this.callingState !== CallingState.Requesting) { return }
call.close()
confirmCall?.remove()
this.notifyCallEnd()
setCallingState(CallingState.False)
}, 30000)
}
confirmAnswer.then(agreed => {
@ -296,13 +286,18 @@ export default class Assist {
return
}
let callUI = new CallWindow()
const callUI = new CallWindow()
annot = new AnnotationCanvas()
annot.mount()
callUI.setAssistentName(agentName)
const onCallEnd = this.options.onCallStart()
const handleCallEnd = () => {
app.debug.log("Handle Call End")
call.close()
callUI.remove()
annot && annot.remove()
annot = null
setCallingState(CallingState.False)
onCallEnd && onCallEnd()
}
@ -350,6 +345,16 @@ export default class Assist {
});
}
private playNotificationSound() {
if ('Audio' in window) {
new Audio("https://static.openreplay.com/tracker-assist/notification.mp3")
.play()
.catch(e => {
this.app.debug.warn(e)
})
}
}
private clean() {
if (this.peer) {
this.peer.destroy()

View file

@ -1,4 +1,5 @@
import type { LocalStream } from './LocalStream.js';
import attachDND from './dnd';
const SS_START_TS_KEY = "__openreplay_assist_call_start_ts"
@ -18,20 +19,21 @@ export default class CallWindow {
private load: Promise<void>
constructor() {
const iframe = this.iframe = document.createElement('iframe');
const iframe = this.iframe = document.createElement('iframe')
Object.assign(iframe.style, {
position: "fixed",
zIndex: 2147483647 - 1,
//borderRadius: ".25em .25em .4em .4em",
//border: "4px rgba(0, 0, 0, .7)",
border: "none",
bottom: "10px",
right: "10px",
background: "white",
height: "200px",
width: "200px",
});
document.body.appendChild(iframe);
})
// TODO: find the best attribute name for the ignoring iframes
iframe.setAttribute("data-openreplay-obscured", "")
iframe.setAttribute("data-openreplay-hidden", "")
iframe.setAttribute("data-openreplay-ignore", "")
document.body.appendChild(iframe)
const doc = iframe.contentDocument;
if (!doc) {
@ -91,22 +93,10 @@ export default class CallWindow {
}, 500);
}
// TODO: better D'n'D
// mb set cursor:move here?
doc.body.setAttribute("draggable", "true");
doc.body.ondragstart = (e) => {
if (!e.dataTransfer || !e.target) { return; }
//@ts-ignore
if (!e.target.classList || !e.target.classList.contains("drag-area")) { return; }
e.dataTransfer.setDragImage(doc.body, e.clientX, e.clientY);
};
doc.body.ondragend = e => {
Object.assign(iframe.style, {
left: `${e.clientX}px`, // TODO: fix the case when ecoordinates are inside the iframe
top: `${e.clientY}px`,
bottom: 'auto',
right: 'auto',
})
const dragArea = doc.querySelector(".drag-area")
if (dragArea) {
// TODO: save coordinates on the new page
attachDND(iframe, dragArea, doc.documentElement)
}
});

View file

@ -2,75 +2,94 @@ import type { Properties } from 'csstype';
import { declineCall, acceptCall, cross, remoteControl } from './icons.js'
type ButtonOptions = HTMLButtonElement | string | {
innerHTML: string,
style?: Properties,
}
const TEXT_GRANT_REMORTE_ACCESS = "Grant Remote Access";
const TEXT_REJECT = "Reject";
const TEXT_ANSWER_CALL = `${acceptCall} &#xa0 Answer`;
type ButtonOptions =
| HTMLButtonElement
| string
| {
innerHTML: string;
style?: Properties;
};
// TODO: common strategy for InputOptions/defaultOptions merging
interface ConfirmWindowOptions {
text: string,
style?: Properties,
confirmBtn: ButtonOptions,
declineBtn: ButtonOptions,
text: string;
style?: Properties;
confirmBtn: ButtonOptions;
declineBtn: ButtonOptions;
}
export type Options = string | Partial<ConfirmWindowOptions>
export type Options = string | Partial<ConfirmWindowOptions>;
function confirmDefault(
opts: Options,
confirmBtn: ButtonOptions,
declineBtn: ButtonOptions,
text: string,
text: string
): ConfirmWindowOptions {
const isStr = typeof opts === "string"
return Object.assign({
text: isStr ? opts : text,
confirmBtn,
declineBtn,
}, isStr ? undefined : opts)
const isStr = typeof opts === "string";
return Object.assign(
{
text: isStr ? opts : text,
confirmBtn,
declineBtn
},
isStr ? undefined : opts
);
}
export const callConfirmDefault = (opts: Options) =>
confirmDefault(opts, acceptCall, declineCall, "You have an incoming call. Do you want to answer?")
export const controlConfirmDefault = (opts: Options) =>
confirmDefault(opts, remoteControl, cross, "Allow remote control?")
export const callConfirmDefault = (opts: Options) =>
confirmDefault(
opts,
TEXT_ANSWER_CALL,
TEXT_REJECT,
"You have an incoming call. Do you want to answer?"
);
export const controlConfirmDefault = (opts: Options) =>
confirmDefault(
opts,
TEXT_GRANT_REMORTE_ACCESS,
TEXT_REJECT,
"Allow remote control?"
);
function makeButton(options: ButtonOptions): HTMLButtonElement {
if (options instanceof HTMLButtonElement) {
return options
return options;
}
const btn = document.createElement('button')
const btn = document.createElement("button");
Object.assign(btn.style, {
background: "transparent",
padding: 0,
margin: 0,
border: 0,
padding: "10px 14px",
fontSize: "14px",
borderRadius: "3px",
border: "none",
cursor: "pointer",
borderRadius: "50%",
width: "22px",
height: "22px",
color: "white", // TODO: nice text button in case when only text is passed
})
display: "flex",
alignItems: "center",
textTransform: "uppercase",
marginRight: "10px"
});
if (typeof options === "string") {
btn.innerHTML = options
btn.innerHTML = options;
} else {
btn.innerHTML = options.innerHTML
Object.assign(btn.style, options.style)
btn.innerHTML = options.innerHTML;
Object.assign(btn.style, options.style);
}
return btn
return btn;
}
export default class ConfirmWindow {
private wrapper: HTMLDivElement;
constructor(options: ConfirmWindowOptions) {
const wrapper = document.createElement('div');
const popup = document.createElement('div');
const p = document.createElement('p');
const wrapper = document.createElement("div");
const popup = document.createElement("div");
const p = document.createElement("p");
p.innerText = options.text;
const buttons = document.createElement('div');
const buttons = document.createElement("div");
const confirmBtn = makeButton(options.confirmBtn);
const declineBtn = makeButton(options.declineBtn);
buttons.appendChild(confirmBtn);
@ -78,27 +97,45 @@ export default class ConfirmWindow {
popup.appendChild(p);
popup.appendChild(buttons);
Object.assign(confirmBtn.style, {
background: "rgba(0, 167, 47, 1)",
color: "white"
});
Object.assign(declineBtn.style, {
background: "#FFE9E9",
color: "#CC0000"
});
Object.assign(buttons.style, {
marginTop: "10px",
display: "flex",
alignItems: "center",
justifyContent: "space-evenly",
// justifyContent: "space-evenly",
backgroundColor: "white",
padding: "10px",
boxShadow: "0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1)",
borderRadius: "6px"
});
Object.assign(popup.style, {
position: "relative",
pointerEvents: "auto",
margin: "4em auto",
width: "90%",
maxWidth: "400px",
padding: "25px 30px",
background: "black",
opacity: ".75",
color: "white",
textAlign: "center",
borderRadius: ".25em .25em .4em .4em",
boxShadow: "0 0 20px rgb(0 0 0 / 20%)",
}, options.style);
Object.assign(
popup.style,
{
font: "14px 'Roboto', sans-serif",
position: "relative",
pointerEvents: "auto",
margin: "4em auto",
width: "90%",
maxWidth: "fit-content",
padding: "20px",
background: "#F3F3F3",
//opacity: ".75",
color: "black",
borderRadius: "3px",
boxShadow: "0px 0px 3.99778px 1.99889px rgba(0, 0, 0, 0.1)"
},
options.style
);
Object.assign(wrapper.style, {
position: "fixed",
@ -107,8 +144,8 @@ export default class ConfirmWindow {
height: "100%",
width: "100%",
pointerEvents: "none",
zIndex: 2147483647 - 1,
})
zIndex: 2147483647 - 1
});
wrapper.appendChild(popup);
this.wrapper = wrapper;
@ -116,18 +153,19 @@ export default class ConfirmWindow {
confirmBtn.onclick = () => {
this._remove();
this.resolve(true);
}
};
declineBtn.onclick = () => {
this._remove();
this.resolve(false);
}
};
}
private resolve: (result: boolean) => void = ()=>{};
private reject: ()=>void = ()=>{};
private resolve: (result: boolean) => void = () => {};
private reject: () => void = () => {};
mount(): Promise<boolean> {
document.body.appendChild(this.wrapper);
return new Promise((resolve, reject) => {
this.resolve = resolve;
this.reject = reject;
@ -135,7 +173,9 @@ export default class ConfirmWindow {
}
private _remove() {
if (!this.wrapper.parentElement) { return; }
if (!this.wrapper.parentElement) {
return;
}
document.body.removeChild(this.wrapper);
}
remove() {

View file

@ -45,7 +45,9 @@ export default class Mouse {
if (el instanceof HTMLElement) {
el.click()
el.focus()
return el
}
return null
}
private readonly pScrEl = document.scrollingElement || document.documentElement // Is it always correct

View file

@ -0,0 +1,88 @@
import Mouse from './Mouse.js';
import ConfirmWindow, { controlConfirmDefault } from './ConfirmWindow.js';
import type { Options as AssistOptions } from './Assist'
enum RCStatus {
Disabled,
Requesting,
Enabled,
}
export default class RemoteControl {
  // Virtual mouse that replays the agent's pointer actions.
  // Exists only while control is granted; must be initialized to null,
  // otherwise strict TS flags a definite-assignment error and `this.mouse`
  // would be `undefined` before the first grant.
  private mouse: Mouse | null = null
  private status: RCStatus = RCStatus.Disabled
  // ID of the agent that currently holds (or is requesting) control.
  private agentID: string | null = null
  constructor(
    private options: AssistOptions,
    private onGrant: (id?: string) => void,
    private onRelease: (id?: string) => void,
  ) {}

  /**
   * Restore a remote-control session after a page navigation/reload:
   * if the agent whose ID was persisted in sessionStorage is still among
   * the connected agents, re-grant control to it without asking the user
   * again; otherwise drop the stale key.
   */
  reconnect(ids: string[]) {
    const storedID = sessionStorage.getItem(this.options.session_control_peer_key)
    if (storedID !== null && ids.includes(storedID)) {
      this.grantControl(storedID)
    } else {
      sessionStorage.removeItem(this.options.session_control_peer_key)
    }
  }

  // Confirmation popup shown to the end user on a control request.
  private confirm: ConfirmWindow | null = null

  /**
   * Handle an incoming remote-control request from agent `id`.
   * Shows a confirmation window; the request auto-expires after 30s
   * without an answer.
   * NOTE(review): if a different agent already holds control, the
   * releaseControl(id) call below is a no-op (IDs differ), so the new
   * request is silently ignored — confirm this is intended.
   */
  requestControl = (id: string) => {
    if (this.agentID !== null) {
      this.releaseControl(id)
      return
    }
    this.agentID = id
    this.status = RCStatus.Requesting
    setTimeout(() => {
      // Auto-cancel if the user neither accepted nor declined in time.
      if (this.status === RCStatus.Requesting) {
        this.releaseControl(id)
      }
    }, 30000)
    this.confirm = new ConfirmWindow(controlConfirmDefault(this.options.controlConfirm))
    this.confirm.mount().then(allowed => {
      if (allowed) {
        this.grantControl(id)
      } else {
        this.releaseControl(id)
      }
    }).catch()
  }

  /** Enable remote control for agent `id` and persist its ID across reloads. */
  grantControl = (id: string) => {
    this.agentID = id
    this.status = RCStatus.Enabled
    this.mouse = new Mouse()
    this.mouse.mount()
    sessionStorage.setItem(this.options.session_control_peer_key, id)
    this.onGrant(id)
  }

  /** Tear down remote control if (and only if) `id` is the controlling agent. */
  releaseControl = (id: string) => {
    if (this.agentID !== id) { return }
    this.confirm?.remove()
    this.mouse?.remove()
    this.mouse = null
    this.status = RCStatus.Disabled
    this.agentID = null
    sessionStorage.removeItem(this.options.session_control_peer_key)
    this.onRelease(id)
  }

  // Pointer events are replayed only when they originate from the
  // controlling agent. Payload shapes (`d`, `xy`) are defined by Mouse's
  // scroll/move/click signatures.
  scroll = (id: string, d: any) => { id === this.agentID && this.mouse?.scroll(d) }
  move = (id: string, xy: any) => { id === this.agentID && this.mouse?.move(xy) }

  // Element focused by the last remote click; target for subsequent
  // `input` events.
  private focused: HTMLElement | null = null

  click = (id: string, xy: any) => {
    if (id !== this.agentID || !this.mouse) { return }
    this.focused = this.mouse.click(xy)
  }

  /** Write agent-typed text into the element focused by the last remote click. */
  input = (id: string, value: string) => {
    if (id !== this.agentID || !this.mouse || !this.focused) { return }
    if (this.focused instanceof HTMLTextAreaElement
      || this.focused instanceof HTMLInputElement) {
      this.focused.value = value
    } else if (this.focused.isContentEditable) {
      this.focused.innerText = value
    }
  }
}

View file

@ -0,0 +1,66 @@
/*
  Drag-and-drop helper for the case when both dragArea and dropArea
  live inside the document of the iframe being dragged (movingEl).
  All pointer events therefore belong to that inner document, whose
  coordinate system moves together with the iframe itself.
*/
export default function attachDND(
  movingEl: HTMLIFrameElement, // the iframe that is visually moved
  dragArea: Element,           // handle the user presses to start a drag
  dropArea: Element,           // element whose pointer events track the drag
) {
  dragArea.addEventListener('pointerdown', userPressed, { passive: true })
  // Drag state: press origin (startX/startY), cached iframe rect (bbox),
  // pending requestAnimationFrame handle (raf), and the offset accumulated
  // since the press (deltaX/deltaY).
  let bbox,
      startX, startY,
      raf,
      deltaX = 0, deltaY = 0

  // Remember where the press started and begin tracking movement on dropArea.
  function userPressed(event) {
    startX = event.clientX
    startY = event.clientY
    bbox = movingEl.getBoundingClientRect()
    dropArea.addEventListener('pointermove', userMoved, { passive: true })
    dropArea.addEventListener('pointerup', userReleased, { passive: true })
    dropArea.addEventListener('pointercancel', userReleased, { passive: true })
  };

  /*
    Because the dropArea (the iframe's own document) moves along with the
    dragged element, each pointermove's client coordinates are relative to
    the already-moved frame — so we can only accumulate deltas; we cannot
    recompute an absolute position on every move.
  */
  function userMoved(event) {
    // Accumulate at most once per animation frame, clamped so the iframe
    // stays within the viewport.
    if (!raf) {
      deltaX += event.clientX - startX
      deltaY += event.clientY - startY
      deltaX = Math.min(
        Math.max(deltaX, -bbox.left),
        window.innerWidth - bbox.right,
      )
      deltaY = Math.min(
        Math.max(deltaY, -bbox.top),
        window.innerHeight - bbox.bottom,
      )
      raf = requestAnimationFrame(userMovedRaf)
    }
  }

  // Apply the accumulated offset once per animation frame (translate3d is
  // used so the move is compositor-only and does not trigger layout).
  function userMovedRaf() {
    movingEl.style.transform = "translate3d("+deltaX+"px,"+deltaY+"px, 0px)";
    raf = null;
  }

  // Commit the transform offset into left/top and reset the drag state.
  function userReleased() {
    dropArea.removeEventListener('pointermove', userMoved)
    dropArea.removeEventListener('pointerup', userReleased)
    dropArea.removeEventListener('pointercancel', userReleased)
    if (raf) {
      cancelAnimationFrame(raf)
      raf = null
    }
    movingEl.style.left = bbox.left + deltaX + "px"
    movingEl.style.top = bbox.top + deltaY + "px"
    movingEl.style.transform = "translate3d(0px,0px,0px)"
    deltaX = deltaY = 0
  }
}

View file

@ -2,7 +2,9 @@
// TODO: something with these big strings in bundle?
export const declineCall = `<svg xmlns="http://www.w3.org/2000/svg" height="22" width="22" viewBox="0 0 128 128" ><g id="Circle_Grid" data-name="Circle Grid"><circle cx="64" cy="64" fill="#ef5261" r="64"/></g><g id="icon"><path d="m57.831 70.1c8.79 8.79 17.405 12.356 20.508 9.253l4.261-4.26a7.516 7.516 0 0 1 10.629 0l9.566 9.566a7.516 7.516 0 0 1 0 10.629l-7.453 7.453c-7.042 7.042-27.87-2.358-47.832-22.319-9.976-9.981-16.519-19.382-20.748-28.222s-5.086-16.091-1.567-19.61l7.453-7.453a7.516 7.516 0 0 1 10.629 0l9.566 9.563a7.516 7.516 0 0 1 0 10.629l-4.264 4.271c-3.103 3.1.462 11.714 9.252 20.5z" fill="#eeefee"/></g></svg>`;
export const declineCall = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-telephone" viewBox="0 0 16 16">
<path d="M3.654 1.328a.678.678 0 0 0-1.015-.063L1.605 2.3c-.483.484-.661 1.169-.45 1.77a17.568 17.568 0 0 0 4.168 6.608 17.569 17.569 0 0 0 6.608 4.168c.601.211 1.286.033 1.77-.45l1.034-1.034a.678.678 0 0 0-.063-1.015l-2.307-1.794a.678.678 0 0 0-.58-.122l-2.19.547a1.745 1.745 0 0 1-1.657-.459L5.482 8.062a1.745 1.745 0 0 1-.46-1.657l.548-2.19a.678.678 0 0 0-.122-.58L3.654 1.328zM1.884.511a1.745 1.745 0 0 1 2.612.163L6.29 2.98c.329.423.445.974.315 1.494l-.547 2.19a.678.678 0 0 0 .178.643l2.457 2.457a.678.678 0 0 0 .644.178l2.189-.547a1.745 1.745 0 0 1 1.494.315l2.306 1.794c.829.645.905 1.87.163 2.611l-1.034 1.034c-.74.74-1.846 1.065-2.877.702a18.634 18.634 0 0 1-7.01-4.42 18.634 18.634 0 0 1-4.42-7.009c-.362-1.03-.037-2.137.703-2.877L1.885.511z"/>
</svg>`;
export const acceptCall = declineCall.replace('fill="#ef5261"', 'fill="green"')

View file

@ -1,19 +1,51 @@
Copyright (c) 2021 OpenReplay.com <support@openreplay.com>
Copyright (c) 2022 Asayer, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Reach out (license@openreplay.com) if you have any questions regarding the license.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
------------------------------------------------------------------------------------
Elastic License 2.0 (ELv2)
**Acceptance**
By using the software, you agree to all of the terms and conditions below.
**Copyright License**
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject to the limitations and conditions below
**Limitations**
You may not provide the software to third parties as a hosted or managed service, where the service provides users with access to any substantial set of the features or functionality of the software.
You may not move, change, disable, or circumvent the license key functionality in the software, and you may not remove or obscure any functionality in the software that is protected by the license key.
You may not alter, remove, or obscure any licensing, copyright, or other notices of the licensor in the software. Any use of the licensor's trademarks is subject to applicable law.
**Patents**
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case subject to the limitations and conditions in this license. This license does not cover any patent claims that you cause to be infringed by modifications or additions to the software. If you or your company make any written claim that the software infringes or contributes to infringement of any patent, your patent license for the software granted under these terms ends immediately. If your company makes such a claim, your patent license ends immediately for work on behalf of your company.
**Notices**
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these terms.
If you modify the software, you must include in any modified copies of the software prominent notices stating that you have modified the software.
**No Other Rights**
These terms do not imply any licenses other than those expressly granted in these terms.
**Termination**
If you use the software in violation of these terms, such use is not licensed, and your licenses will automatically terminate. If the licensor provides you with a notice of your violation, and you cease all violation of this license no later than 30 days after you receive that notice, your licenses will be reinstated retroactively. However, if you violate these terms after such reinstatement, any additional violation of these terms will cause your licenses to terminate automatically and permanently.
**No Liability**
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will not be liable to you for any damages arising out of these terms or the use or nature of the software, under any kind of legal claim.
**Definitions**
The *licensor* is the entity offering these terms, and the *software* is the software the licensor makes available under these terms, including any portion of it.
*you* refers to the individual or entity agreeing to these terms.
*your company* is any legal entity, sole proprietorship, or other kind of organization that you work for, plus all organizations that have control over, are under the control of, or are under common control with that organization. *control* means ownership of substantially all the assets of an entity, or the power to direct its management and policies by vote, contract, or otherwise. Control can be direct or indirect.
*your licenses* are all the licenses granted to you for the software under these terms.
*use* means anything you do with the software requiring one of your licenses.
*trademark* means trademarks, service marks, and similar rights.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-fetch",
"description": "Tracker plugin for fetch requests recording ",
"version": "3.5.1",
"version": "3.5.2",
"keywords": [
"fetch",
"logging",

View file

@ -33,9 +33,10 @@ export default function(opts: Partial<Options> = {}) {
},
opts,
);
const origFetch = window.fetch
return (app: App | null) => {
if (app === null) {
return window.fetch;
return origFetch
}
const ihOpt = options.ignoreHeaders
@ -45,7 +46,7 @@ export default function(opts: Partial<Options> = {}) {
const fetch = async (input: RequestInfo, init: RequestInit = {}) => {
if (typeof input !== 'string') {
return window.fetch(input, init);
return origFetch(input, init);
}
if (options.sessionTokenHeader) {
const sessionToken = app.getSessionToken();
@ -63,7 +64,7 @@ export default function(opts: Partial<Options> = {}) {
}
}
const startTime = performance.now();
const response = await window.fetch(input, init);
const response = await origFetch(input, init);
const duration = performance.now() - startTime;
if (options.failuresOnly && response.status < 400) {
return response

View file

@ -1,4 +1,5 @@
node_modules
npm-debug.log
lib
cjs
.cache

View file

@ -1,18 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tracker_1 = require("@openreplay/tracker/cjs");
function default_1() {
return (app) => {
if (app === null) {
return (name) => (fn, thisArg) => thisArg === undefined ? fn : fn.bind(thisArg);
}
return (name) => (fn, thisArg) => (...args) => {
const startTime = performance.now();
const result = thisArg === undefined ? fn.apply(this, args) : fn.apply(thisArg, args);
const duration = performance.now() - startTime;
app.send(tracker_1.Messages.Profiler(name, duration, args.map(String).join(', '), String(result)));
return result;
};
};
}
exports.default = default_1;

View file

@ -1 +0,0 @@
{ "type": "commonjs" }

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-redux",
"description": "Tracker plugin for Redux state recording",
"version": "3.4.8",
"version": "3.5.0",
"keywords": [
"redux",
"logging",
@ -23,11 +23,11 @@
},
"dependencies": {},
"peerDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"redux": "^4.0.0"
},
"devDependencies": {
"@openreplay/tracker": "^3.4.8",
"@openreplay/tracker": "^3.5.0",
"prettier": "^1.18.2",
"replace-in-files-cli": "^1.0.0",
"typescript": "^4.6.0-dev.20211126"

View file

@ -23,8 +23,11 @@ export default function(opts: Partial<Options> = {}) {
return () => next => action => next(action);
}
const encoder = new Encoder(sha1, 50);
app.attachStopCallback(() => {
encoder.clear()
})
return ({ getState }) => next => action => {
if (!options.actionFilter(action)) {
if (!app.active() || !options.actionFilter(action)) {
return next(action);
}
const startTime = performance.now();

View file

@ -5,7 +5,7 @@
"alwaysStrict": true,
"target": "es6",
"module": "es6",
"moduleResolution": "nodenext",
"moduleResolution": "node",
"declaration": true,
"outDir": "./lib"
}

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "3.5.3",
"version": "3.5.4",
"keywords": [
"logging",
"replay"

View file

@ -41,32 +41,57 @@ export function isInstance<T extends WindowConstructor>(node: Node, constr: Cons
// @ts-ignore (for EI, Safary)
doc.parentWindow ||
doc.defaultView; // TODO: smart global typing for Window object
while(context.parent && context.parent !== context) {
while((context.parent || context.top) && context.parent !== context) {
// @ts-ignore
if (node instanceof context[constr.name]) {
return true
}
// @ts-ignore
context = context.parent
context = context.parent || context.top
}
// @ts-ignore
return node instanceof context[constr.name]
}
export function inDocument(node: Node): boolean {
// TODO: ensure 1. it works in every cases (iframes/detached nodes) and 2. the most efficient
export function inDocument(node: Node) {
const doc = node.ownerDocument
if (!doc) { return false }
if (doc.contains(node)) { return true }
let context: Window =
// @ts-ignore (for EI, Safary)
doc.parentWindow ||
doc.defaultView;
while(context.parent && context.parent !== context) {
if (context.document.contains(node)) {
if (!doc) { return true } // Document
let current: Node | null = node
while(current) {
if (current === doc) {
return true
} else if(isInstance(current, ShadowRoot)) {
current = current.host
} else {
current = current.parentNode
}
// @ts-ignore
context = context.parent
}
return false;
return false
}
// export function inDocument(node: Node): boolean {
// // @ts-ignore compatability
// if (node.getRootNode) {
// let root: Node
// while ((root = node.getRootNode()) !== node) {
// ////
// }
// }
// const doc = node.ownerDocument
// if (!doc) { return false }
// if (doc.contains(node)) { return true }
// let context: Window =
// // @ts-ignore (for EI, Safary)
// doc.parentWindow ||
// doc.defaultView;
// while(context.parent && context.parent !== context) {
// if (context.document.contains(node)) {
// return true
// }
// // @ts-ignore
// context = context.parent
// }
// return false;
// }

View file

@ -1,4 +1,3 @@
import { hasOpenreplayAttribute } from "../../utils.js";
import {
RemoveNodeAttribute,
SetNodeAttribute,
@ -59,9 +58,7 @@ export default abstract class Observer {
private readonly indexes: Array<number> = [];
private readonly attributesList: Array<Set<string> | undefined> = [];
private readonly textSet: Set<number> = new Set();
private readonly inUpperContext: boolean;
constructor(protected readonly app: App, protected readonly context: Window = window) {
this.inUpperContext = context.parent === context //TODO: get rid of context here
constructor(protected readonly app: App, protected readonly isTopContext = false) {
this.observer = new MutationObserver(
this.app.safe((mutations) => {
for (const mutation of mutations) {
@ -226,7 +223,7 @@ export default abstract class Observer {
// Disable parent check for the upper context HTMLHtmlElement, because it is root there... (before)
// TODO: get rid of "special" cases (there is an issue with CreateDocument altered behaviour though)
// TODO: Clean the logic (though now it workd fine)
if (!isInstance(node, HTMLHtmlElement) || !this.inUpperContext) {
if (!isInstance(node, HTMLHtmlElement) || !this.isTopContext) {
if (parent === null) {
this.unbindNode(node);
return false;
@ -321,6 +318,8 @@ export default abstract class Observer {
for (let id = 0; id < this.recents.length; id++) {
// TODO: make things/logic nice here.
// commit required in any case if recents[id] true or false (in case of unbinding) or undefined (in case of attr change).
// Possible solution: separate new node commit (recents) and new attribute/move node commit
// Otherwise commitNode is called on each node, which might be a lot
if (!this.myNodes[id]) { continue }
this.commitNode(id);
if (this.recents[id] === true && (node = this.app.nodes.getNode(id))) {

View file

@ -6,7 +6,7 @@ import ShadowRootObserver from "./shadow_root_observer.js";
import { CreateDocument } from "../../../messages/index.js";
import App from "../index.js";
import { IN_BROWSER } from '../../utils.js'
import { IN_BROWSER, hasOpenreplayAttribute } from '../../utils.js'
export interface Options {
captureIFrames: boolean
@ -17,15 +17,16 @@ const attachShadowNativeFn = IN_BROWSER ? Element.prototype.attachShadow : ()=>n
export default class TopObserver extends Observer {
private readonly options: Options;
constructor(app: App, options: Partial<Options>) {
super(app);
super(app, true);
this.options = Object.assign({
captureIFrames: false
captureIFrames: true
}, options);
// IFrames
this.app.nodes.attachNodeCallback(node => {
if (isInstance(node, HTMLIFrameElement) &&
(this.options.captureIFrames || node.getAttribute("data-openreplay-capture"))
((this.options.captureIFrames && !hasOpenreplayAttribute(node, "obscured"))
|| hasOpenreplayAttribute(node, "capture"))
) {
this.handleIframe(node)
}
@ -42,26 +43,25 @@ export default class TopObserver extends Observer {
private iframeObservers: IFrameObserver[] = [];
private handleIframe(iframe: HTMLIFrameElement): void {
let context: Window | null = null
let doc: Document | null = null
const handle = this.app.safe(() => {
const id = this.app.nodes.getID(iframe)
if (id === undefined) { return } //log
if (iframe.contentWindow === context) { return } //Does this happen frequently?
context = iframe.contentWindow as Window | null;
if (!context) { return }
const observer = new IFrameObserver(this.app, context)
if (iframe.contentDocument === doc) { return } // How frequently can it happen?
doc = iframe.contentDocument
if (!doc || !iframe.contentWindow) { return }
const observer = new IFrameObserver(this.app)
this.iframeObservers.push(observer)
observer.observe(iframe)
})
this.app.attachEventListener(iframe, "load", handle)
iframe.addEventListener("load", handle) // why app.attachEventListener not working?
handle()
}
private shadowRootObservers: ShadowRootObserver[] = []
private handleShadowRoot(shRoot: ShadowRoot) {
const observer = new ShadowRootObserver(this.app, this.context)
const observer = new ShadowRootObserver(this.app)
this.shadowRootObservers.push(observer)
observer.observe(shRoot.host)
}
@ -81,9 +81,9 @@ export default class TopObserver extends Observer {
// the change in the re-player behaviour caused by CreateDocument message:
// the 0-node ("fRoot") will become #document rather than documentElement as it is now.
// Alternatively - observe(#document) then bindNode(documentElement)
this.observeRoot(this.context.document, () => {
this.observeRoot(window.document, () => {
this.app.send(new CreateDocument())
}, this.context.document.documentElement);
}, window.document.documentElement);
}
disconnect() {

View file

@ -2,8 +2,8 @@ const _io = require('socket.io');
const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
var wsRouter = express.Router();
const {extractPeerId} = require('./peerjs-server');
const wsRouter = express.Router();
const UPDATE_EVENT = "UPDATE_SESSION";
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
@ -12,83 +12,152 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;
let io;
let debug = process.env.debug === "1" || false;
const debug = process.env.debug === "1" || false;
const socketsList = function (req, res) {
// Initialise the module-level Socket.IO server (`io`) on top of the given
// HTTP server. `prefix` (optional) is prepended to the websocket path.
// maxHttpBufferSize comes from the environment in MB (default 5 MB).
const createSocketIOServer = function (server, prefix) {
    const pathPrefix = prefix ? prefix : '';
    const bufferSizeMb = parseInt(process.env.maxHttpBufferSize) || 5;
    io = _io(server, {
        maxHttpBufferSize: bufferSizeMb * 1e6,
        cors: {
            origin: "*",
            methods: ["GET", "POST", "PUT"]
        },
        path: pathPrefix + '/socket'
    });
}
// Pull the optional `userId` filter from the request's query string.
// Returns the userId when present (logging it in debug mode), otherwise undefined.
const extractUserIdFromRequest = function (req) {
    if (!req.query.userId) {
        return undefined;
    }
    debug && console.log(`[WS]where userId=${req.query.userId}`);
    return req.query.userId;
}
// Read the `:projectKey` route parameter from the request.
// Returns the key when present (logging it in debug mode), otherwise undefined.
const extractProjectKeyFromRequest = function (req) {
    if (!req.params.projectKey) {
        return undefined;
    }
    debug && console.log(`[WS]where projectKey=${req.params.projectKey}`);
    return req.params.projectKey;
}
// Return the room ids currently known to the Socket.IO adapter.
// Declared async so call sites can uniformly `await` it.
const getAvailableRooms = async function () {
    const adapter = io.sockets.adapter;
    return adapter.rooms.keys();
}
// Send `data` back to the client as a 200 JSON envelope: {"data": <data>}.
const respond = function (res, data) {
    const payload = JSON.stringify({"data": data});
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json');
    res.end(payload);
}
// GET .../sockets-list — list connected session ids grouped by projectKey.
// When the request carries ?userId=..., only sessions whose handshake
// sessionInfo.userID matches are included.
// NOTE(review): this body interleaves pre- and post-merge lines from a
// rendered diff; lines marked "pre-merge residue" duplicate the logic of the
// lines following them and must be removed when reconciling (as written, the
// double loop header breaks brace balance).
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) { // pre-merge residue — superseded by getAvailableRooms() below
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId); // pre-merge residue — unconditional push, superseded by the userId filter below
if (userId) {
// Only include the session if one of its sockets belongs to this user.
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
res.statusCode = 200; // pre-merge residue — replaced by respond() below
res.setHeader('Content-Type', 'application/json'); // pre-merge residue
res.end(JSON.stringify({"data": liveSessions})); // pre-merge residue
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, socketsList);
// GET .../sockets-list/:projectKey — session ids for a single project,
// optionally filtered by ?userId=... as in socketsList.
// NOTE(review): diff residue — the block declares `socketsListByProject`
// twice (old sync version and new async version) and keeps both the old
// `req.params.projectKey` comparisons/inline response and the new
// `_projectKey`/respond() code. Keep only the lines not marked as residue.
const socketsListByProject = function (req, res) { // pre-merge residue — superseded by the async declaration below
debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`); // pre-merge residue
const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) { // pre-merge residue — superseded by getAvailableRooms() below
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) { // pre-merge residue — superseded by the _projectKey comparison below
if (projectKey === _projectKey) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(sessionId); // pre-merge residue — superseded by the userId filter below
if (userId) {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(sessionId);
}
}
} else {
liveSessions[projectKey].push(sessionId);
}
}
}
res.statusCode = 200; // pre-merge residue — replaced by respond() below
res.setHeader('Content-Type', 'application/json'); // pre-merge residue
res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []})); // pre-merge residue
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, socketsListByProject);
// GET .../sockets-live — handshake sessionInfo of every connected socket that
// identifies as a session, grouped by projectKey; optionally filtered by
// ?userId=... against sessionInfo.userID.
// NOTE(review): diff residue — lines marked "pre-merge residue" duplicate the
// logic of the lines that follow them and must be removed when reconciling.
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) { // pre-merge residue — superseded by getAvailableRooms() below
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo); // pre-merge residue — superseded by the userId filter below
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
res.statusCode = 200; // pre-merge residue — replaced by respond() below
res.setHeader('Content-Type', 'application/json'); // pre-merge residue
res.end(JSON.stringify({"data": liveSessions})); // pre-merge residue
respond(res, liveSessions);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, socketsLive);
// GET .../sockets-live/:projectKey — live session info for one project,
// optionally filtered by ?userId=... as in socketsLive.
// NOTE(review): diff residue — lines marked "pre-merge residue" belong to the
// older revision (logging req.params directly, comparing req.params.projectKey,
// unconditional push, inline response) and must be removed when reconciling.
const socketsLiveByProject = async function (req, res) {
debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`); // pre-merge residue — superseded by the line below
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let userId = extractUserIdFromRequest(req);
let liveSessions = {};
for (let peerId of io.sockets.adapter.rooms.keys()) { // pre-merge residue — superseded by getAvailableRooms() below
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === req.params.projectKey) { // pre-merge residue — superseded by the _projectKey comparison below
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
liveSessions[projectKey].push(item.handshake.query.sessionInfo); // pre-merge residue — superseded by the userId filter below
if (userId) {
if (item.handshake.query.sessionInfo && item.handshake.query.sessionInfo.userID === userId) {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
} else {
liveSessions[projectKey].push(item.handshake.query.sessionInfo);
}
}
}
}
}
res.statusCode = 200; // pre-merge residue — replaced by respond() below
res.setHeader('Content-Type', 'application/json'); // pre-merge residue
res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []})); // pre-merge residue
respond(res, liveSessions[_projectKey] || []);
}
wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, socketsLiveByProject);
@ -167,16 +236,8 @@ function extractSessionInfo(socket) {
module.exports = {
wsRouter,
start: (server) => {
io = _io(server, {
maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6,
cors: {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: '/socket'
});
start: (server, prefix) => {
createSocketIOServer(server, prefix);
io.on('connection', async (socket) => {
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
socket.peerId = socket.handshake.query.peerId;
@ -247,10 +308,10 @@ module.exports = {
socket.onAny(async (eventName, ...args) => {
socket.lastMessageReceivedAt = Date.now();
if (socket.identity === IDENTITIES.session) {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}, members: ${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
socket.to(socket.peerId).emit(eventName, args[0]);
} else {
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
let socketId = await findSessionSocketId(io, socket.peerId);
if (socketId === null) {
debug && console.log(`session not found for:${socket.peerId}`);
@ -264,7 +325,7 @@ module.exports = {
});
console.log("WS server started")
setInterval((io) => {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);