* feat(api): usability testing (#1686) * feat(api): usability testing - wip * feat(db): usabiity testing * feat(api): usability testing - api * feat(api): usability testing - api * feat(api): usability testing - db change * feat(api): usability testing - db change * feat(api): usability testing - unit tests update * feat(api): usability testing - test and tasks stats * feat(api): usability testing - sessions list fix, return zeros if test id is not having signals * Api v1.16.0 (#1698) * feat: canvas support [assist] (#1641) * feat(tracker/ui): start canvas support * feat(tracker): slpeer -> peerjs for canvas streams * fix(ui): fix agent canvas peer id * fix(ui): fix agent canvas peer id * fix(ui): fix peer removal * feat(tracker): canvas recorder * feat(tracker): canvas recorder * feat(tracker): canvas recorder * feat(tracker): canvas recorder * feat(ui): canvas support for ui * fix(tracker): fix falling tests * feat(ui): replay canvas in video * feat(ui): refactor video streaming to draw on canvas * feat(ui): 10hz check for canvas replay * feat(ui): fix for tests * feat(ui): fix for tests * feat(ui): fix for tests * feat(ui): fix for tests cov * feat(ui): mroe test coverage * fix(ui): styling * fix(tracker): support backend settings for canvas * feat(ui): allow devtools to be resizeable (#1605) * fix(ui): console redux tab null check * Api v1.15.0 (#1689) * fix(chalice): fix create alert with MS Teams notification channel closes openreplay/openreplay#1677 * fix(chalice): fix MS Teams notifications * refactor(chalice): enhanced MS Teams notifications closes openreplay/openreplay#1681 (cherry picked from commit265897f509) * fix(ui): filter keys conflcit with metadata, path analysis 4 col * fix(ui): clear the filers and series on card type change * fix(player): fix msg reader bug * fix(DB): fix CH wrong version (#1692) (cherry picked from commit48dbbb55db) * fix(ui): filter keys conflcit with metadata * fix(tracker): unique broadcast channel name * fix(chalice): 
fixed delete cards (#1697) (cherry picked from commit92fedd310c) * fix(tracker): add trycatch to ignore iframe errors * feat(backend): added ARM arch support to backend services [Dockerfile] * feat(backend): removed userAgent from sessions and unstarted-sessions tables * fix(DB): change path-analysis card size --------- Co-authored-by: Delirium <nikita@openreplay.com> Co-authored-by: Shekar Siri <sshekarsiri@gmail.com> Co-authored-by: Alexander <zavorotynskiy@pm.me> * refactor(chalice): cleaned code (#1699) * feat(api): usability testing - added start_path to the resposne, remove count from the list * feat(api): usability testing - test to have response count and live count * feat(api): usability testing - test to have additional data * Revert "refactor(chalice): cleaned code (#1699)" (#1702) This reverts commit83f2b0c12c. * feat(api): usability testing - responses with total and other improvements * change(api): vulnerability whitelist udpate * feat(api): usability testing - create added missing columns, and sessions with user_id search * feat(api): usability testing - update test with responseCount * feat(api): usability testing - timestamps in unix * feat(api): usability testing - request with proper case change * feat(api): usability testing - task.description nullable * feat(api): usability testing - check deleted status * Api v1.16.0 (#1707) * fix(chalice): fixed search sessions * fix(chalice): fixed search sessions * refactor(chalice): upgraded dependencies * refactor(crons): upgraded dependencies * refactor(alerts): upgraded dependencies * Api v1.16.0 (#1712) * feat(DB): user-testing support * feat(chalice): user testing support * feat(chalice): support utxVideo (#1726) * feat(chalice): changed bucket name for ux testing webcamera videos --------- Co-authored-by: Shekar Siri <sshekarsiri@gmail.com> Co-authored-by: Kraiem Taha Yassine <tahayk2@gmail.com> Co-authored-by: Delirium <nikita@openreplay.com> Co-authored-by: Alexander <zavorotynskiy@pm.me>
219 lines
12 KiB
Python
219 lines
12 KiB
Python
import schemas
|
|
from chalicelib.core import events, metadata, events_ios, \
|
|
sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas, user_testing
|
|
from chalicelib.utils import errors_helper
|
|
from chalicelib.utils import pg_client, helper
|
|
|
|
|
|
def __group_metadata(session, project_metadata):
    """Move project-metadata values out of a flat session row.

    `project_metadata` maps session columns to the project's configured
    labels (a None label means the column is unused). Every column that has
    both a label and a non-None value is removed from `session` in place and
    returned in a new dict keyed by its label.
    """
    grouped = {}
    for column, label in project_metadata.items():
        if label is None or session.get(column) is None:
            continue
        grouped[label] = session.pop(column)
    return grouped
|
|
|
|
|
|
# for backward compatibility
def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
                  group_metadata=False, live=True):
    """Fetch one session row from Postgres, optionally enriched with replay payloads.

    :param project_id: project owning the session (also used to resolve project_key).
    :param session_id: id of the session to fetch.
    :param context: current user/tenant context; user_id drives the favorite/viewed flags.
    :param full_data: when True, also load events, errors, custom events, mob/devtools
        URLs, resources, notes, grouped metadata and issues onto the result.
    :param include_fav_viewed: when True, add per-user boolean `favorite`/`viewed` columns.
    :param group_metadata: when True, join public.projects and collect the project's
        metadata columns into a `projectMetadata` JSON object.
    :param live: when True and the session is not in the DB, fall back to the live
        (assist) session lookup.
    :return: camelCased session dict, a live-session dict, or None.
    """
    with pg_client.PostgresClient() as cur:
        extra_query = []
        if include_fav_viewed:
            # per-user flags computed with correlated sub-selects
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_favorite_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS favorite""")
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_viewed_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS viewed""")
        query = cur.mogrify(
            f"""\
            SELECT
                s.*,
                s.session_id::text AS session_id,
                (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
                {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
                {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
            FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
            WHERE s.project_id = %(project_id)s
                AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
        )
        cur.execute(query=query)

        data = cur.fetchone()
        if data is not None:
            data = helper.dict_to_camel_case(data)
            if full_data:
                if data["platform"] == 'ios':
                    data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
                    # strip the platform suffix so event types match the web naming
                    for e in data['events']:
                        if e["type"].endswith("_IOS"):
                            e["type"] = e["type"][:-len("_IOS")]
                    data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
                    data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id,
                                                                              session_id=session_id)
                    data['mobsUrl'] = []
                else:
                    data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                              group_clickrage=True)
                    all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
                    # js_exception errors are reported separately; everything else is a stack event
                    data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
                    # to keep only the first stack
                    # limit the number of errors to reduce the response-body size
                    data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                                      if e['source'] == "js_exception"][:500]
                    data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
                                                                          session_id=session_id)
                    data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
                                                            check_existence=False)
                    data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
                    data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                                    check_existence=False)
                    data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                                    start_ts=data["startTs"], duration=data["duration"])

                data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
                                                                 session_id=session_id, user_id=context.user_id)
                # NOTE(review): pop() presumes group_metadata=True whenever full_data=True,
                # otherwise "projectMetadata" is absent and this raises KeyError — confirm callers
                data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
                data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
                data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
                                                       project_key=data["projectKey"])
                data["inDB"] = True
            return data
        elif live:
            # not found in the DB: the session may still be in progress (assist)
            return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
        else:
            return None
|
|
|
|
|
|
def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
               group_metadata=False, live=True):
    """Fetch one session row with the replay asset URLs (DOM mobs, devtools,
    canvas, iOS video and usability-testing webcam video).

    :param project_id: project owning the session.
    :param session_id: id of the session to fetch.
    :param context: current user/tenant context; user_id drives the favorite/viewed flags.
    :param full_data: when True, attach the presigned asset URLs and grouped metadata.
    :param include_fav_viewed: when True, add per-user boolean `favorite`/`viewed` columns.
    :param group_metadata: when True, join public.projects and collect the project's
        metadata columns into a `projectMetadata` JSON object.
    :param live: when True and the session is not in the DB, fall back to the live
        (assist) session lookup.
    :return: camelCased session dict, a live-session dict, or None.
    """
    with pg_client.PostgresClient() as cur:
        extra_query = []
        if include_fav_viewed:
            # per-user flags computed with correlated sub-selects
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_favorite_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS favorite""")
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_viewed_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS viewed""")
        query = cur.mogrify(
            f"""\
            SELECT
                s.*,
                s.session_id::text AS session_id,
                (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
                {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
                {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''}
            FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
            WHERE s.project_id = %(project_id)s
                AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
        )
        cur.execute(query=query)

        data = cur.fetchone()
        if data is not None:
            data = helper.dict_to_camel_case(data)
            if full_data:
                if data["platform"] == 'ios':
                    # iOS replays are video-based: no DOM mob files
                    data['domURL'] = []
                    data['videoURL'] = sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id,
                                                                    check_existence=False)
                else:
                    data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
                                                            check_existence=False)
                    data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
                    data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                                    check_existence=False)
                    data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
                    # usability-testing webcam video is only fetched when the session
                    # actually produced test signals
                    if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
                        data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
                                                                                 project_id=project_id,
                                                                                 check_existence=False)
                    else:
                        data['utxVideo'] = []

                # NOTE(review): pop() presumes group_metadata=True whenever full_data=True,
                # otherwise "projectMetadata" is absent and this raises KeyError — confirm callers
                data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
                data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
                                                       project_key=data["projectKey"])
                data["inDB"] = True
            return data
        elif live:
            # not found in the DB: the session may still be in progress (assist)
            return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
        else:
            return None
|
|
|
|
|
|
def get_events(project_id, session_id):
    """Load the event streams of a session (events, errors, custom events,
    resources, usability-testing signals and reduced issues).

    Only a minimal session row (platform, start_ts, duration) is read here;
    the heavy payloads come from the per-store helper modules.

    :param project_id: project owning the session.
    :param session_id: id of the session whose events are requested.
    :return: dict of event collections, or None when the session does not exist.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT session_id, platform, start_ts, duration
                FROM public.sessions AS s
                WHERE s.project_id = %(project_id)s
                    AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id}
        )
        cur.execute(query=query)

        s_data = cur.fetchone()
        if s_data is not None:
            s_data = helper.dict_to_camel_case(s_data)
            data = {}
            if s_data["platform"] == 'ios':
                data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
                # strip the platform suffix so event types match the web naming
                for e in data['events']:
                    if e["type"].endswith("_IOS"):
                        e["type"] = e["type"][:-len("_IOS")]
                data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
                data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id,
                                                                          session_id=session_id)
                # usability testing is not available on iOS sessions
                data['userTesting'] = []
            else:
                data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                          group_clickrage=True)
                all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id)
                # js_exception errors are reported separately; everything else is a stack event
                data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
                # to keep only the first stack
                # limit the number of errors to reduce the response-body size
                data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors
                                  if e['source'] == "js_exception"][:500]
                data['userEvents'] = events.get_customs_by_session_id(project_id=project_id,
                                                                      session_id=session_id)
                data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                                start_ts=s_data["startTs"], duration=s_data["duration"])
                data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)

            data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
            # collapse near-duplicate issues before returning them to the player
            data['issues'] = reduce_issues(data['issues'])
            return data
        else:
            return None
|
|
|
|
|
|
# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    """Collapse near-duplicate issues: drop a later issue of the same type
    when it occurs within 2 seconds of an earlier one.

    The list is modified in place and also returned.

    :param issues_list: list of issue dicts with at least "type" and
        "timestamp" keys, or None.
    :return: the reduced list, or None when issues_list is None.
    """
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        # find the next issue that shares issues_list[i]'s type
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
        else:
            # no later issue of this type: advance to the next issue.
            # (Bug fix: the original used `break` here, which aborted the
            # whole while-loop at the first issue without a same-type
            # successor, leaving the rest of the list unprocessed.)
            i += 1
            continue

        # Bug fix: compare the absolute gap; the original signed difference
        # is always negative for an ascending list, making the 2s window
        # vacuously true and dropping every same-type duplicate.
        if abs(issues_list[i]["timestamp"] - issues_list[j]["timestamp"]) < 2000:
            issues_list.pop(j)
        else:
            i += 1

    return issues_list
|