Api v1.16.0 (#1730)

* feat(api): usability testing (#1686)

* feat(api): usability testing - wip

* feat(db): usability testing

* feat(api): usability testing - api

* feat(api): usability testing - api

* feat(api): usability testing - db change

* feat(api): usability testing - db change

* feat(api): usability testing - unit tests update

* feat(api): usability testing - test and tasks stats

* feat(api): usability testing - sessions list fix, return zeros if the test id has no signals

* Api v1.16.0 (#1698)

* feat: canvas support [assist] (#1641)

* feat(tracker/ui): start canvas support

* feat(tracker): slpeer -> peerjs for canvas streams

* fix(ui): fix agent canvas peer id

* fix(ui): fix agent canvas peer id

* fix(ui): fix peer removal

* feat(tracker): canvas recorder

* feat(tracker): canvas recorder

* feat(tracker): canvas recorder

* feat(tracker): canvas recorder

* feat(ui): canvas support for ui

* fix(tracker): fix failing tests

* feat(ui): replay canvas in video

* feat(ui): refactor video streaming to draw on canvas

* feat(ui): 10hz check for canvas replay

* feat(ui): fix for tests

* feat(ui): fix for tests

* feat(ui): fix for tests

* feat(ui): fix for tests cov

* feat(ui): more test coverage

* fix(ui): styling

* fix(tracker): support backend settings for canvas

* feat(ui): allow devtools to be resizeable (#1605)

* fix(ui): console redux tab null check

* Api v1.15.0 (#1689)

* fix(chalice): fix create alert with MS Teams notification channel
closes openreplay/openreplay#1677

* fix(chalice): fix MS Teams notifications
* refactor(chalice): enhanced MS Teams notifications
closes openreplay/openreplay#1681

(cherry picked from commit 265897f509)

* fix(ui): filter keys conflict with metadata, path analysis 4 col

* fix(ui): clear the filters and series on card type change

* fix(player): fix msg reader bug

* fix(DB): fix CH wrong version (#1692)

(cherry picked from commit 48dbbb55db)

* fix(ui): filter keys conflict with metadata

* fix(tracker): unique broadcast channel name

* fix(chalice): fixed delete cards (#1697)

(cherry picked from commit 92fedd310c)

* fix(tracker): add trycatch to ignore iframe errors

* feat(backend): added ARM arch support to backend services [Dockerfile]

* feat(backend): removed userAgent from sessions and unstarted-sessions tables

* fix(DB): change path-analysis card size

---------

Co-authored-by: Delirium <nikita@openreplay.com>
Co-authored-by: Shekar Siri <sshekarsiri@gmail.com>
Co-authored-by: Alexander <zavorotynskiy@pm.me>

* refactor(chalice): cleaned code (#1699)

* feat(api): usability testing - added start_path to the response, removed count from the list

* feat(api): usability testing - test to have response count and live count

* feat(api): usability testing - test to have additional data

* Revert "refactor(chalice): cleaned code (#1699)" (#1702)

This reverts commit 83f2b0c12c.

* feat(api): usability testing - responses with total and other improvements

* change(api): vulnerability whitelist update

* feat(api): usability testing - create added missing columns, and sessions with user_id search

* feat(api): usability testing - update test with responseCount

* feat(api): usability testing - timestamps in unix

* feat(api): usability testing - request with proper case change

* feat(api): usability testing - task.description nullable

* feat(api): usability testing - check deleted status

* Api v1.16.0 (#1707)

* fix(chalice): fixed search sessions

* fix(chalice): fixed search sessions
* refactor(chalice): upgraded dependencies
* refactor(crons): upgraded dependencies
* refactor(alerts): upgraded dependencies

* Api v1.16.0 (#1712)

* feat(DB): user-testing support

* feat(chalice): user testing support

* feat(chalice): support utxVideo (#1726)

* feat(chalice): changed bucket name for ux testing webcam videos

---------

Co-authored-by: Shekar Siri <sshekarsiri@gmail.com>
Co-authored-by: Kraiem Taha Yassine <tahayk2@gmail.com>
Co-authored-by: Delirium <nikita@openreplay.com>
Co-authored-by: Alexander <zavorotynskiy@pm.me>
Rajesh Rajendran 2023-11-30 10:53:31 +01:00, committed by GitHub
parent ee6ddbb6cb
commit 4c7f2edd57
23 changed files with 1381 additions and 15 deletions

View file

@@ -1,3 +1,3 @@
 # Accept the risk until
-# python setup tools recently fixed. Not yet avaialable in distros.
-CVE-2022-40897 exp:2023-02-01
+# python setup tools recently fixed. Not yet available in distros.
+CVE-2023-5363 exp:2023-12-31

View file

@@ -0,0 +1,202 @@
import logging
from chalicelib.utils import helper, pg_client
class DatabaseRequestHandler:
def __init__(self, table_name):
self.table_name = table_name
self.constraints = []
self.params = {}
self.order_clause = ""
self.sort_clause = ""
self.select_columns = []
self.sub_queries = []
self.joins = []
self.group_by_clause = ""
self.client = pg_client
self.logger = logging.getLogger(__name__)
self.pagination = {}
def add_constraint(self, constraint, param=None):
self.constraints.append(constraint)
if param:
self.params.update(param)
def add_subquery(self, subquery, alias, param=None):
self.sub_queries.append((subquery, alias))
if param:
self.params.update(param)
def add_join(self, join_clause):
self.joins.append(join_clause)
def add_param(self, key, value):
self.params[key] = value
def set_order_by(self, order_by):
self.order_clause = order_by
def set_sort_by(self, sort_by):
self.sort_clause = sort_by
def set_select_columns(self, columns):
self.select_columns = columns
def set_group_by(self, group_by_clause):
self.group_by_clause = group_by_clause
def set_pagination(self, page, page_size):
"""
Set pagination parameters for the query.
:param page: The page number (1-indexed)
:param page_size: Number of items per page
"""
self.pagination = {
'offset': (page - 1) * page_size,
'limit': page_size
}
def build_query(self, action="select", additional_clauses=None, data=None):
if action == "select":
query = f"SELECT {', '.join(self.select_columns)} FROM {self.table_name}"
elif action == "insert":
columns = ', '.join(data.keys())
placeholders = ', '.join(f'%({k})s' for k in data.keys())
query = f"INSERT INTO {self.table_name} ({columns}) VALUES ({placeholders})"
elif action == "update":
set_clause = ', '.join(f"{k} = %({k})s" for k in data.keys())
query = f"UPDATE {self.table_name} SET {set_clause}"
elif action == "delete":
query = f"DELETE FROM {self.table_name}"
for join in self.joins:
query += f" {join}"
for subquery, alias in self.sub_queries:
query += f", ({subquery}) AS {alias}"
if self.constraints:
query += " WHERE " + " AND ".join(self.constraints)
if action == "select":
if self.group_by_clause:
query += " GROUP BY " + self.group_by_clause
if self.sort_clause:
query += " ORDER BY " + self.sort_clause
if self.order_clause:
query += " " + self.order_clause
if hasattr(self, 'pagination') and self.pagination:
query += " LIMIT %(limit)s OFFSET %(offset)s"
self.params.update(self.pagination)
if additional_clauses:
query += " " + additional_clauses
logging.info(f"Query: {query}")
return query
def execute_query(self, query, data=None):
try:
with self.client.PostgresClient() as cur:
mogrified_query = cur.mogrify(query, {**data, **self.params} if data else self.params)
cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None
except Exception as e:
self.logger.error(f"Database operation failed: {e}")
raise
def fetchall(self):
query = self.build_query()
return self.execute_query(query)
def fetchone(self):
query = self.build_query()
result = self.execute_query(query)
return result[0] if result else None
def insert(self, data):
query = self.build_query(action="insert", data=data)
query += " RETURNING *;"
result = self.execute_query(query, data)
return result[0] if result else None
def update(self, data):
query = self.build_query(action="update", data=data)
query += " RETURNING *;"
result = self.execute_query(query, data)
return result[0] if result else None
def delete(self):
query = self.build_query(action="delete")
return self.execute_query(query)
def batch_insert(self, items):
if not items:
return None
columns = ', '.join(items[0].keys())
# Building a values string with unique parameter names for each item
all_values_query = ', '.join(
'(' + ', '.join([f"%({key}_{i})s" for key in item]) + ')'
for i, item in enumerate(items)
)
query = f"INSERT INTO {self.table_name} ({columns}) VALUES {all_values_query} RETURNING *;"
try:
with self.client.PostgresClient() as cur:
# Flatten items into a single dictionary with unique keys
combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query)
return cur.fetchall()
except Exception as e:
self.logger.error(f"Database batch insert operation failed: {e}")
raise
def raw_query(self, query, params=None):
try:
with self.client.PostgresClient() as cur:
mogrified_query = cur.mogrify(query, params)
cur.execute(mogrified_query)
return cur.fetchall() if cur.description else None
except Exception as e:
self.logger.error(f"Database operation failed: {e}")
raise
def batch_update(self, items):
if not items:
return None
id_column = list(items[0])[0]
# Building the set clause for the update statement
update_columns = list(items[0].keys())
update_columns.remove(id_column)
set_clause = ', '.join([f"{col} = v.{col}" for col in update_columns])
# Building the values part for the 'VALUES' section
values_rows = []
for item in items:
values = ', '.join([f"%({key})s" for key in item.keys()])
values_rows.append(f"({values})")
values_query = ', '.join(values_rows)
# Constructing the full update query
query = f"""
UPDATE {self.table_name} AS t
SET {set_clause}
FROM (VALUES {values_query}) AS v ({', '.join(items[0].keys())})
WHERE t.{id_column} = v.{id_column};
"""
try:
with self.client.PostgresClient() as cur:
# Flatten items into a single dictionary for mogrify
combined_params = {k: v for item in items for k, v in item.items()}
mogrified_query = cur.mogrify(query, combined_params)
cur.execute(mogrified_query)
except Exception as e:
self.logger.error(f"Database batch update operation failed: {e}")
raise
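
For orientation, a minimal sketch of how this builder is meant to be driven (the table, columns, and import path come from the file above; an importable chalicelib package is assumed):

import logging
logging.basicConfig(level=logging.INFO)

from chalicelib.core.db_request_handler import DatabaseRequestHandler

handler = DatabaseRequestHandler("ut_tests AS ut")
handler.set_select_columns(["ut.test_id", "ut.title"])
handler.add_constraint("ut.project_id = %(project_id)s", {"project_id": 1})
handler.add_constraint("ut.deleted_at IS NULL")
handler.set_sort_by("ut.created_at DESC")
handler.set_pagination(page=2, page_size=10)  # -> offset 10, limit 10

# build_query only assembles the SQL string; execute_query/fetchall run it.
print(handler.build_query())
# SELECT ut.test_id, ut.title FROM ut_tests AS ut WHERE ut.project_id = %(project_id)s
# AND ut.deleted_at IS NULL ORDER BY ut.created_at DESC LIMIT %(limit)s OFFSET %(offset)s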

View file

@@ -8,7 +8,7 @@ from chalicelib.utils import sql_helper as sh

 logger = logging.getLogger(__name__)

-SESSION_PROJECTION_COLS = """s.project_id,
+SESSION_PROJECTION_BASE_COLS = """s.project_id,
 s.session_id::text AS session_id,
 s.user_uuid,
 s.user_id,
@@ -28,7 +28,9 @@ s.user_anonymous_id,
 s.platform,
 s.issue_score,
 s.timezone,
-to_jsonb(s.issue_types) AS issue_types,
+to_jsonb(s.issue_types) AS issue_types """
+
+SESSION_PROJECTION_COLS = SESSION_PROJECTION_BASE_COLS + """,
 favorite_sessions.session_id NOTNULL AS favorite,
 COALESCE((SELECT TRUE
           FROM public.user_viewed_sessions AS fs
@@ -1260,3 +1262,28 @@ def check_recording_status(project_id: int) -> dict:
         "recordingStatus": row["recording_status"],
         "sessionsCount": row["sessions_count"]
     }
+
+
+def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
+                           ascending: bool = False) -> dict:
+    if session_ids is None or len(session_ids) == 0:
+        return {"total": 0, "sessions": []}
+    with pg_client.PostgresClient() as cur:
+        meta_keys = metadata.get(project_id=project_id)
+        params = {"project_id": project_id, "session_ids": tuple(session_ids)}
+        order_direction = 'ASC' if ascending else 'DESC'
+        main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_BASE_COLS}
+                                     {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
+                                     FROM public.sessions AS s
+                                     WHERE project_id=%(project_id)s
+                                       AND session_id IN %(session_ids)s
+                                     ORDER BY {sort_by} {order_direction};""", params)
+        cur.execute(main_query)
+        rows = cur.fetchall()
+        if len(meta_keys) > 0:
+            for s in rows:
+                s["metadata"] = {}
+                for m in meta_keys:
+                    s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
+        return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}

View file

@@ -1,6 +1,6 @@
 import schemas
 from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas
+    sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas, user_testing
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
@@ -132,6 +132,12 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
         data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                         check_existence=False)
         data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
+        if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
+            data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
+                                                                     project_id=project_id,
+                                                                     check_existence=False)
+        else:
+            data['utxVideo'] = []
     data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
     data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
@@ -167,6 +173,7 @@ def get_events(project_id, session_id):
         data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
         data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id,
                                                                   session_id=session_id)
+        data['userTesting'] = []
     else:
         data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                   group_clickrage=True)
@@ -180,6 +187,7 @@ def get_events(project_id, session_id):
                                                       session_id=session_id)
         data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                         start_ts=s_data["startTs"], duration=s_data["duration"])
+        data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)
         data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
         data['issues'] = reduce_issues(data['issues'])

View file

@@ -0,0 +1,124 @@
from fastapi import Body, Depends
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestRead, UTTestUpdate, UTTestDelete, SearchResult, \
UTTestSearch, UTTestSessionsSearch, UTTestResponsesSearch, StatusEnum, UTTestStatusUpdate
from chalicelib.core.usability_testing import service
from or_dependencies import OR_context
from routers.base import get_routers
from schemas import schemas
public_app, app, app_apikey = get_routers()
tags = ["usability-tests"]
@app.post("/{projectId}/usability-tests/search", tags=tags)
async def search_ui_tests(
projectId: int,
search: UTTestSearch = Body(...,
description="The search parameters including the query, page, limit, sort_by, "
"and sort_order.")
):
"""
Search for UT tests within a given project with pagination and optional sorting.
- **projectId**: The unique identifier of the project to search within.
- **search**: The search parameters including the query, page, limit, sort_by, and sort_order.
"""
return service.search_ui_tests(projectId, search)
@app.post("/{projectId}/usability-tests", tags=tags)
async def create_ut_test(projectId: int, test_data: UTTestCreate,
context: schemas.CurrentContext = Depends(OR_context)):
"""
Create a new UT test in the specified project.
- **projectId**: The unique identifier of the project.
- **test_data**: The data for the new UT test.
"""
test_data.project_id = projectId
test_data.created_by = context.user_id
return service.create_ut_test(test_data)
@app.get("/{projectId}/usability-tests/{test_id}", tags=tags)
async def get_ut_test(projectId: int, test_id: int):
"""
Retrieve a specific UT test by its ID.
- **projectId**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test.
"""
return service.get_ut_test(projectId, test_id)
@app.delete("/{projectId}/usability-tests/{test_id}", tags=tags)
async def delete_ut_test(projectId: int, test_id: int):
"""
Delete a specific UT test by its ID.
- **projectId**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test to be deleted.
"""
return service.delete_ut_test(projectId, test_id)
@app.put("/{projectId}/usability-tests/{test_id}", tags=tags)
async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate):
"""
Update a specific UT test by its ID.
- **project_id**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test to be updated.
- **test_update**: The updated data for the UT test.
"""
return service.update_ut_test(projectId, test_id, test_update)
@app.get("/{projectId}/usability-tests/{test_id}/sessions", tags=tags)
async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
live: bool = False,
user_id: str = None):
"""
Get sessions related to a specific UT test.
- **projectId**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test.
"""
return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
@app.get("/{projectId}/usability-tests/{test_id}/responses/{task_id}", tags=tags)
async def get_responses(test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
"""
Get responses related to a specific UT test.
- **project_id**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test.
"""
return service.get_responses(test_id, task_id, page, limit, query)
@app.get("/{projectId}/usability-tests/{test_id}/statistics", tags=tags)
async def get_statistics(test_id: int):
"""
Get statistics related to a specific UT test.
:param test_id:
:return:
"""
return service.get_statistics(test_id=test_id)
@app.get("/{projectId}/usability-tests/{test_id}/task-statistics", tags=tags)
async def get_task_statistics(test_id: int):
"""
Get statistics related to a specific UT test.
:param test_id:
:return:
"""
return service.get_task_statistics(test_id=test_id)
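
As an illustration, a hypothetical client call against the search route above (host, port, auth headers, and the exact field casing are assumptions; schemas.BaseModel may apply camelCase aliases to the UTTestSearch fields):

import requests

resp = requests.post(
    "http://localhost:8000/1/usability-tests/search",  # projectId = 1
    json={"query": "checkout", "page": 1, "limit": 10,
          "sort_by": "created_at", "sort_order": "desc"},
)
print(resp.json())  # {"data": {"list": [...], "total": ..., "page": 1, "limit": 10}}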

View file

@@ -0,0 +1,134 @@
from typing import Optional, List
from pydantic import Field
from datetime import datetime
from enum import Enum
from schemas import BaseModel
from pydantic.v1 import validator
class StatusEnum(str, Enum):
preview = 'preview'
in_progress = 'in-progress'
paused = 'paused'
closed = 'closed'
class UTTestTask(BaseModel):
task_id: Optional[int] = Field(None, description="The unique identifier of the task")
test_id: Optional[int] = Field(None, description="The unique identifier of the usability test")
title: str = Field(..., description="The title of the task")
description: Optional[str] = Field(None, description="A detailed description of the task")
allow_typing: Optional[bool] = Field(False, description="Indicates if the user is allowed to type")
class UTTestBase(BaseModel):
title: str = Field(..., description="The title of the usability test")
project_id: Optional[int] = Field(None, description="The ID of the associated project")
created_by: Optional[int] = Field(None, description="The ID of the user who created the test")
starting_path: Optional[str] = Field(None, description="The starting path for the usability test")
status: Optional[StatusEnum] = Field(StatusEnum.in_progress, description="The current status of the usability test")
require_mic: bool = Field(False, description="Indicates if a microphone is required")
require_camera: bool = Field(False, description="Indicates if a camera is required")
description: Optional[str] = Field(None, description="A detailed description of the usability test")
guidelines: Optional[str] = Field(None, description="Guidelines for the usability test")
conclusion_message: Optional[str] = Field(None, description="Conclusion message for the test participants")
visibility: bool = Field(False, description="Flag to indicate if the test is visible to the public")
tasks: Optional[List[UTTestTask]] = Field(None, description="List of tasks for the usability test")
class UTTestCreate(UTTestBase):
pass
class UTTestStatusUpdate(BaseModel):
status: StatusEnum = Field(..., description="The updated status of the usability test")
class UTTestRead(UTTestBase):
test_id: int = Field(..., description="The unique identifier of the usability test")
created_by: Optional[int] = Field(None, description="The ID of the user who created the test")
updated_by: Optional[int] = Field(None, description="The ID of the user who last updated the test")
created_at: datetime = Field(..., description="The timestamp when the test was created")
updated_at: datetime = Field(..., description="The timestamp when the test was last updated")
deleted_at: Optional[datetime] = Field(None, description="The timestamp when the test was deleted, if applicable")
class UTTestUpdate(BaseModel):
# Optional fields for updating the usability test
title: Optional[str] = Field(None, description="The updated title of the usability test")
status: Optional[StatusEnum] = Field(None, description="The updated status of the usability test")
description: Optional[str] = Field(None, description="The updated description of the usability test")
starting_path: Optional[str] = Field(None, description="The updated starting path for the usability test")
require_mic: Optional[bool] = Field(None, description="Indicates if a microphone is required")
require_camera: Optional[bool] = Field(None, description="Indicates if a camera is required")
guidelines: Optional[str] = Field(None, description="Updated guidelines for the usability test")
conclusion_message: Optional[str] = Field(None, description="Updated conclusion message for the test participants")
visibility: Optional[bool] = Field(None, description="Flag to indicate if the test is visible to the public")
tasks: Optional[List[UTTestTask]] = Field([], description="List of tasks for the usability test")
class UTTestDelete(BaseModel):
# You would usually not need a model for deletion, but let's assume you need to confirm the deletion timestamp
deleted_at: datetime = Field(..., description="The timestamp when the test is marked as deleted")
class UTTestSearch(BaseModel):
query: Optional[str] = Field(None, description="Search query for the UT tests")
page: Optional[int] = Field(1, ge=1, description="Page number of the results")
limit: Optional[int] = Field(10, ge=1, le=100, description="Number of results per page")
sort_by: Optional[str] = Field(description="Field to sort by", default="created_at")
sort_order: Optional[str] = Field("asc", description="Sort order: 'asc' or 'desc'")
is_active: Optional[bool] = Field(True, description="Flag to indicate if the test is active")
user_id: Optional[int] = Field(None, description="The ID of the user who created the test")
@validator('sort_order')
def sort_order_must_be_valid(cls, v):
if v not in ['asc', 'desc']:
raise ValueError('Sort order must be either "asc" or "desc"')
return v
class UTTestResponsesSearch(BaseModel):
query: Optional[str] = Field(None, description="Search query for the UT responses")
page: Optional[int] = Field(1, ge=1, description="Page number of the results")
limit: Optional[int] = Field(10, ge=1, le=100, description="Number of results per page")
class UTTestSignal(BaseModel):
signal_id: int = Field(..., description="The unique identifier of the response")
test_id: int = Field(..., description="The unique identifier of the usability test")
session_id: int = Field(..., description="The unique identifier of the session")
type: str = Field(..., description="The type of the signal")
type_id: int = Field(..., description="The unique identifier of the type")
status: str = Field(..., description="The status of the signal")
comment: Optional[str] = Field(None, description="The comment for the signal")
timestamp: datetime = Field(..., description="The timestamp when the signal was created")
class UTTestResponse(BaseModel):
test_id: int = Field(..., description="The unique identifier of the usability test")
response_id: str = Field(..., description="The type of the signal")
status: str = Field(..., description="The status of the signal")
comment: Optional[str] = Field(None, description="The comment for the signal")
timestamp: datetime = Field(..., description="The timestamp when the signal was created")
class UTTestSession(BaseModel):
test_id: int = Field(..., description="The unique identifier of the usability test")
session_id: int = Field(..., description="The unique identifier of the session")
status: str = Field(..., description="The status of the signal")
timestamp: datetime = Field(..., description="The timestamp when the signal was created")
class UTTestSessionsSearch(BaseModel):
page: Optional[int] = Field(1, ge=1, description="Page number of the results")
limit: Optional[int] = Field(10, ge=1, le=100, description="Number of results per page")
status: Optional[str] = Field(None, description="The status of the session")
class SearchResult(BaseModel):
results: List[UTTestRead]
total: int
page: int
limit: int
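
A small sketch of the defaults these models encode (assumes an importable chalicelib package; note the file mixes a pydantic.v1 validator into otherwise v2-style models, so whether the sort_order check fires depends on the installed pydantic):

from chalicelib.core.usability_testing.schema import UTTestSearch, StatusEnum

search = UTTestSearch(query="login", limit=20, sort_order="desc")
print(search.page, search.sort_by)   # 1 created_at  (defaults apply)
print(StatusEnum.in_progress.value)  # in-progress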

View file

@@ -0,0 +1,396 @@
import logging
from fastapi import HTTPException, status
from chalicelib.core.db_request_handler import DatabaseRequestHandler
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestSearch, UTTestUpdate, UTTestStatusUpdate
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import dict_to_camel_case, list_to_camel_case
from chalicelib.core import sessions, metadata
table_name = "ut_tests"
def search_ui_tests(project_id: int, search: UTTestSearch):
select_columns = [
"ut.test_id",
"ut.title",
"ut.description",
"ut.created_at",
"ut.updated_at",
"ut.status",
"json_build_object('user_id', u.user_id, 'name', u.name) AS created_by"
]
db_handler = DatabaseRequestHandler("ut_tests AS ut")
db_handler.set_select_columns([f"COUNT(*) OVER() AS count"] + select_columns)
db_handler.add_join("LEFT JOIN users u ON ut.created_by = u.user_id")
db_handler.add_constraint("ut.project_id = %(project_id)s", {'project_id': project_id})
db_handler.add_constraint("ut.deleted_at IS NULL")
db_handler.set_sort_by(f"ut.{search.sort_by} {search.sort_order}")
db_handler.set_pagination(page=search.page, page_size=search.limit)
if (search.user_id is not None) and (search.user_id != 0):
db_handler.add_constraint("ut.created_by = %(user_id)s", {'user_id': search.user_id})
if search.query:
db_handler.add_constraint("ut.title ILIKE %(query)s", {'query': f"%{search.query}%"})
rows = db_handler.fetchall()
if not rows or len(rows) == 0:
return {"data": {"total": 0, "list": []}}
total = rows[0]["count"]
for row in rows:
del row["count"]
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
row["updated_at"] = TimeUTC.datetime_to_timestamp(row["updated_at"])
return {
"data": {
"list": list_to_camel_case(rows),
"total": total,
"page": search.page,
"limit": search.limit
}
}
def create_ut_test(test_data: UTTestCreate):
db_handler = DatabaseRequestHandler("ut_tests")
data = {
'project_id': test_data.project_id,
'title': test_data.title,
'description': test_data.description,
'created_by': test_data.created_by,
'status': test_data.status,
'conclusion_message': test_data.conclusion_message,
'starting_path': test_data.starting_path,
'require_mic': test_data.require_mic,
'require_camera': test_data.require_camera,
'guidelines': test_data.guidelines,
'visibility': test_data.visibility,
}
# Execute the insert query
new_test = db_handler.insert(data)
test_id = new_test['test_id']
new_test['created_at'] = TimeUTC.datetime_to_timestamp(new_test['created_at'])
new_test['updated_at'] = TimeUTC.datetime_to_timestamp(new_test['updated_at'])
# Insert tasks
if test_data.tasks:
new_test['tasks'] = insert_tasks(test_id, test_data.tasks)
else:
new_test['tasks'] = []
return {
"data": dict_to_camel_case(new_test)
}
def insert_tasks(test_id, tasks):
db_handler = DatabaseRequestHandler("ut_tests_tasks")
data = []
for task in tasks:
data.append({
'test_id': test_id,
'title': task.title,
'description': task.description,
'allow_typing': task.allow_typing,
})
return db_handler.batch_insert(data)
def get_ut_test(project_id: int, test_id: int):
db_handler = DatabaseRequestHandler("ut_tests AS ut")
tasks_sql = """
SELECT COALESCE(jsonb_agg(utt ORDER BY task_id), '[]'::jsonb) AS tasks
FROM public.ut_tests_tasks AS utt
WHERE utt.test_id = %(test_id)s
"""
select_columns = [
"ut.test_id",
"ut.title",
"ut.description",
"ut.status",
"ut.created_at",
"ut.updated_at",
"ut.starting_path",
"ut.conclusion_message",
"ut.require_mic",
"ut.require_camera",
"ut.guidelines",
"ut.visibility",
"json_build_object('id', u.user_id, 'name', u.name) AS created_by",
"COALESCE((SELECT COUNT(*) FROM ut_tests_signals uts WHERE uts.test_id = ut.test_id AND uts.task_id IS NOT NULL AND uts.status in %(response_statuses)s AND uts.comment is NOT NULL), 0) AS responses_count",
"COALESCE((SELECT COUNT(*) FROM ut_tests_signals uts WHERE uts.test_id = ut.test_id AND uts.duration IS NULL AND uts.task_id IS NULL), 0) AS live_count",
]
db_handler.add_param("response_statuses", ('done', 'skipped'))
db_handler.set_select_columns(select_columns + [f"({tasks_sql}) AS tasks"])
db_handler.add_join("LEFT JOIN users u ON ut.created_by = u.user_id")
db_handler.add_constraint("ut.project_id = %(project_id)s", {'project_id': project_id})
db_handler.add_constraint("ut.test_id = %(test_id)s", {'test_id': test_id})
db_handler.add_constraint("ut.deleted_at IS NULL")
row = db_handler.fetchone()
if not row:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Test not found")
row['created_at'] = TimeUTC.datetime_to_timestamp(row['created_at'])
row['updated_at'] = TimeUTC.datetime_to_timestamp(row['updated_at'])
row['tasks'] = [dict_to_camel_case(task) for task in row['tasks']]
return {
"data": dict_to_camel_case(row)
}
def delete_ut_test(project_id: int, test_id: int):
db_handler = DatabaseRequestHandler("ut_tests")
update_data = {'deleted_at': 'NOW()'} # Using a SQL function directly
db_handler.add_constraint("project_id = %(project_id)s", {'project_id': project_id})
db_handler.add_constraint("test_id = %(test_id)s", {'test_id': test_id})
db_handler.add_constraint("deleted_at IS NULL")
try:
db_handler.update(update_data)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
def check_test_exists(db_handler, project_id, test_id):
db_handler.set_select_columns(['1']) # '1' as a dummy column for existence check
db_handler.add_constraint("project_id = %(project_id)s", {'project_id': project_id})
db_handler.add_constraint("test_id = %(test_id)s", {'test_id': test_id})
db_handler.add_constraint("deleted_at IS NULL")
return bool(db_handler.fetchone())
def update_ut_test(project_id: int, test_id: int, test_update: UTTestUpdate):
db_handler = DatabaseRequestHandler("ut_tests")
# Check if the test exists
if not check_test_exists(db_handler, project_id, test_id):
return {"status": "error", "message": "Test not found"}
tasks = test_update.tasks
del test_update.tasks
update_data = test_update.model_dump(exclude_unset=True)
if not update_data:
return {"status": "no_update"}
db_handler.constraints.clear()
db_handler.add_constraint("project_id = %(project_id)s", {'project_id': project_id})
db_handler.add_constraint("test_id = %(test_id)s", {'test_id': test_id})
db_handler.add_constraint("deleted_at IS NULL")
result = db_handler.update(update_data)
if result is None:
return {"status": "error", "message": "No update was made"}
result['tasks'] = check_tasks_update(db_handler, test_id, tasks)
result['created_at'] = TimeUTC.datetime_to_timestamp(result['created_at'])
result['updated_at'] = TimeUTC.datetime_to_timestamp(result['updated_at'])
return {
"data": dict_to_camel_case(result)
}
def check_tasks_update(db_handler, test_id, tasks):
if tasks is None:
return []
db_handler = DatabaseRequestHandler("ut_tests_tasks")
existing_tasks = get_test_tasks(db_handler, test_id)
existing_task_ids = {task['task_id'] for task in existing_tasks}
to_be_updated = [task for task in tasks if task.task_id in existing_task_ids]
to_be_created = [task for task in tasks if task.task_id not in existing_task_ids]
to_be_deleted = existing_task_ids - {task.task_id for task in tasks}
# Perform batch operations
if to_be_updated:
batch_update_tasks(db_handler, to_be_updated)
if to_be_created:
insert_tasks(test_id, to_be_created)
if to_be_deleted:
delete_tasks(db_handler, to_be_deleted)
return get_test_tasks(db_handler, test_id)
def delete_tasks(db_handler, task_ids):
db_handler.constraints.clear()
db_handler.add_constraint("task_id IN %(task_ids)s", {'task_ids': tuple(task_ids)})
db_handler.delete()
def batch_update_tasks(db_handler, tasks):
db_handler = DatabaseRequestHandler("ut_tests_tasks")
data = []
for task in tasks:
data.append({
'task_id': task.task_id,
'title': task.title,
'description': task.description,
'allow_typing': task.allow_typing,
})
db_handler.batch_update(data)
def get_test_tasks(db_handler, test_id):
db_handler.constraints.clear()
db_handler.set_select_columns(['task_id', 'title', 'description', 'allow_typing'])
db_handler.add_constraint("test_id = %(test_id)s", {'test_id': test_id})
return db_handler.fetchall()
def ut_tests_sessions(project_id: int, test_id: int, page: int, limit: int, user_id: int = None, live: bool = False):
handler = DatabaseRequestHandler("ut_tests_signals AS uts")
handler.set_select_columns(["uts.session_id"])
handler.add_constraint("uts.test_id = %(test_id)s", {'test_id': test_id})
handler.add_constraint("uts.task_id is NULL")
handler.set_pagination(page, limit)
if user_id:
handler.add_constraint("s.user_id = %(user_id)s", {'user_id': user_id})
handler.add_join("JOIN sessions s ON s.session_id = uts.session_id")
if live:
handler.add_constraint("uts.duration IS NULL")
else:
handler.add_constraint("uts.status IN %(status_list)s", {'status_list': ('done', 'skipped')})
session_ids = handler.fetchall()
session_ids = [session['session_id'] for session in session_ids]
sessions_list = sessions.search_sessions_by_ids(project_id=project_id, session_ids=session_ids)
sessions_list['page'] = page
return sessions_list
def get_responses(test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
db_handler = DatabaseRequestHandler("ut_tests_signals AS uts")
db_handler.set_select_columns([
"COUNT(*) OVER() AS count",
"uts.status",
"uts.timestamp",
"uts.comment",
"s.user_id",
])
db_handler.add_constraint("uts.comment IS NOT NULL")
db_handler.add_constraint("uts.status IN %(status_list)s", {'status_list': ('done', 'skipped')})
db_handler.add_constraint("uts.test_id = %(test_id)s", {'test_id': test_id})
db_handler.add_constraint("uts.task_id = %(task_id)s", {'task_id': task_id})
db_handler.set_pagination(page, limit)
db_handler.add_join("JOIN sessions s ON s.session_id = uts.session_id")
if query:
db_handler.add_constraint("uts.comment ILIKE %(query)s", {'query': f"%{query}%"})
responses = db_handler.fetchall()
count = responses[0]['count'] if responses else 0
for response in responses:
del response['count']
return {
"data": {
"total": count,
"list": responses,
"page": page,
"limit": limit
}
}
def get_statistics(test_id: int):
try:
handler = DatabaseRequestHandler("ut_tests_signals sig")
results = handler.raw_query("""
WITH TaskCounts AS (SELECT test_id, COUNT(*) as total_tasks
FROM ut_tests_tasks
GROUP BY test_id),
CompletedSessions AS (SELECT s.session_id, s.test_id
FROM ut_tests_signals s
WHERE s.test_id = %(test_id)s
AND s.status = 'done'
AND s.task_id IS NOT NULL
GROUP BY s.session_id, s.test_id
HAVING COUNT(DISTINCT s.task_id) = (SELECT total_tasks FROM TaskCounts
WHERE test_id = s.test_id))
SELECT sig.test_id,
sum(case when sig.task_id is null then 1 else 0 end) as tests_attempts,
sum(case when sig.task_id is null and sig.status = 'skipped' then 1 else 0 end) as tests_skipped,
sum(case when sig.task_id is not null and sig.status = 'done' then 1 else 0 end) as tasks_completed,
sum(case when sig.task_id is not null and sig.status = 'skipped' then 1 else 0 end) as tasks_skipped,
(SELECT COUNT(*) FROM CompletedSessions WHERE test_id = sig.test_id) as completed_all_tasks
FROM ut_tests_signals sig
LEFT JOIN TaskCounts tc ON sig.test_id = tc.test_id
WHERE sig.status IN ('done', 'skipped')
AND sig.test_id = %(test_id)s
GROUP BY sig.test_id;
""", params={
'test_id': test_id
})
if results is None or len(results) == 0:
return {
"data": {
"tests_attempts": 0,
"tests_skipped": 0,
"tasks_completed": 0,
"tasks_skipped": 0,
"completed_all_tasks": 0
}
}
return {
"data": results[0]
}
except HTTPException as http_exc:
raise http_exc
except Exception as e:
logging.error(f"Unexpected error occurred: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal server error")
def get_task_statistics(test_id: int):
db_handler = DatabaseRequestHandler("ut_tests_tasks utt")
db_handler.set_select_columns([
"utt.task_id",
"utt.title",
"sum(case when uts.status = 'done' then 1 else 0 end) as completed",
"avg(case when uts.status = 'done' then uts.duration else 0 end) as avg_completion_time",
"sum(case when uts.status = 'skipped' then 1 else 0 end) as skipped"
])
db_handler.add_join("JOIN ut_tests_signals uts ON utt.task_id = uts.task_id")
db_handler.add_constraint("utt.test_id = %(test_id)s", {'test_id': test_id})
db_handler.set_group_by("utt.task_id, utt.title")
rows = db_handler.fetchall()
return {
"data": list_to_camel_case(rows)
}
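
For reference, the shape a caller gets back from the statistics helper above (values are made up; assumes a configured pg_client and an existing test):

from chalicelib.core.usability_testing import service

print(service.get_statistics(test_id=123))
# {'data': {'tests_attempts': 5, 'tests_skipped': 1, 'tasks_completed': 9,
#           'tasks_skipped': 2, 'completed_all_tasks': 3}}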

View file

@@ -0,0 +1,147 @@
import unittest
import datetime
from unittest.mock import MagicMock, patch
from fastapi import HTTPException
from chalicelib.core.usability_testing.service import search_ui_tests, create_ut_test, get_ut_test, delete_ut_test, \
update_ut_test
from chalicelib.core.usability_testing.schema import UTTestSearch, UTTestCreate, UTTestUpdate
class TestUsabilityTesting(unittest.TestCase):
def setUp(self):
self.mocked_pool = patch('chalicelib.utils.pg_client.postgreSQL_pool').start()
self.mocked_pool.getconn.return_value = MagicMock()
# Mocking the PostgresClient
self.mock_pg_client = patch('chalicelib.utils.pg_client.PostgresClient').start()
self.mocked_cursor = MagicMock()
self.mock_pg_client.return_value.__enter__.return_value = self.mocked_cursor
# Mocking init and terminate functions
self.mocked_init = patch('chalicelib.utils.pg_client.init').start()
self.mocked_terminate = patch('chalicelib.utils.pg_client.terminate').start()
def tearDown(self):
patch.stopall()
def test_search_ui_tests_returns_correct_data(self):
self.mocked_cursor.fetchall.return_value = [
{
"count": 1,
"test_id": 123,
"title": "Test",
"description": "Description",
"is_active": True,
"created_by": 1,
"created_at": datetime.datetime.now().isoformat(),
"updated_at": datetime.datetime.now().isoformat(),
},
]
result = search_ui_tests(1, UTTestSearch(page=1, limit=10, sort_by='test_id', sort_order='asc'))
result = result['data']
self.assertEqual(1, len(result['list']))
self.assertEqual(1, result['total'])
self.assertEqual(1, result['page'])
self.assertEqual(10, result['limit'])
def test_create_ut_test_creates_record(self):
data = UTTestCreate(title="Test", description="Description", is_active=True, project_id=1, status="preview")
self.mocked_cursor.fetchall.return_value = [
{
"project_id": 1,
"status": "preview",
"test_id": 123,
"title": "Test",
"description": "Description",
"is_active": True,
"created_by": 1,
"created_at": datetime.datetime.now().isoformat(),
"updated_at": datetime.datetime.now().isoformat(),
}
]
result = create_ut_test(data)
self.assertEqual(result['data']['testId'], 123)
self.assertEqual(result['data']['title'], "Test")
self.assertEqual(result['data']['description'], "Description")
self.assertEqual(result['data']['isActive'], True)
self.assertEqual(result['data']['createdBy'], 1)
self.assertEqual(result['data']['status'], "preview")
def test_get_ut_test_returns_correct_data(self):
self.mocked_cursor.fetchall.return_value = [
{
"test_id": 123,
"title": "Test",
"created_by": 1,
"created_at": datetime.datetime.now().isoformat(),
"updated_at": datetime.datetime.now().isoformat(),
"tasks": [
{
"task_id": 1,
"test_id": 123,
"title": "Task",
"description": "Description",
"allow_typing": True,
}
]
}
]
result = get_ut_test(1, 123)
self.assertIsNotNone(result['data'])
self.assertEqual(result['data']['testId'], 123)
self.assertEqual(result['data']['title'], "Test")
self.mocked_cursor.fetchall.return_value = None
with self.assertRaises(HTTPException):
get_ut_test(1, 999)
def test_delete_ut_test_deletes_record(self):
self.mocked_cursor.return_value = 1
result = delete_ut_test(1, 123)
self.assertEqual(result['status'], 'success')
# def test_update_ut_test_updates_record(self):
# self.mocked_cursor.fetchall.return_value = [
# {
# "test_id": 123,
# "title": "Test",
# "created_by": 1,
# "created_at": datetime.datetime.now().isoformat(),
# "updated_at": datetime.datetime.now().isoformat(),
# "tasks": [
# {
# "task_id": 1,
# "test_id": 123,
# "title": "Task",
# "description": "Description",
# "allow_typing": True,
# }
# ]
# }
# ]
#
# result = update_ut_test(1, 123, UTTestUpdate(title="Updated Test"))
# self.assertEqual(result['status'], 'success')
# def test_update_status_updates_status(self):
# self.mock_pg_client.PostgresClient.return_value.__enter__.return_value.rowcount = 1
#
# result = update_status(1, 123, 'active')
#
# self.assertEqual('active', result['status'])
if __name__ == '__main__':
unittest.main()

View file

@@ -0,0 +1,42 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils.storage import StorageClient
from decouple import config
def get_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT *
FROM public.ut_tests_signals
LEFT JOIN public.ut_tests_tasks USING (task_id)
WHERE session_id = %(session_id)s
ORDER BY timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
return helper.dict_to_camel_case(rows)
def has_test_signals(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT EXISTS(SELECT 1 FROM public.ut_tests_signals
WHERE session_id = %(session_id)s) AS has;""",
{"project_id": project_id, "session_id": session_id})
)
row = cur.fetchone()
return row.get("has")
def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool = True):
results = []
bucket_name = "uxtesting-records" # config("sessions_bucket")
k = f'{session_id}/ux_webcam_record.webm'
if check_existence and not StorageClient.exists(bucket=bucket_name, key=k):
return []
results.append(StorageClient.get_presigned_url_for_sharing(
bucket=bucket_name,
expires_in=100000,
key=k
))
return results
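
A hedged usage sketch, mirroring how sessions_replay.py consumes these helpers in the diffs elsewhere in this PR (the session and project ids are made up):

from chalicelib.core import user_testing

if user_testing.has_test_signals(session_id=7, project_id=1):
    urls = user_testing.get_ux_webcam_signed_url(session_id=7, project_id=1,
                                                 check_existence=False)
else:
    urls = []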

View file

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.29.0
+boto3==1.29.7
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 elasticsearch==8.11.0

View file

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.29.0
+boto3==1.29.7
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 elasticsearch==8.11.0

View file

@@ -14,6 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
 from chalicelib.core.collaboration_msteams import MSTeams
 from chalicelib.core.collaboration_slack import Slack
 from or_dependencies import OR_context, OR_role
+from chalicelib.core.usability_testing.routes import app as usability_testing_routes
 from routers.base import get_routers

 public_app, app, app_apikey = get_routers()
@@ -860,3 +861,6 @@ async def check_recording_status(project_id: int):
 @public_app.get('/', tags=["health"])
 def health_check():
     return {}
+
+
+app.include_router(usability_testing_routes)

ee/api/.gitignore
View file

@@ -269,3 +269,4 @@ Pipfile.lock
 /schemas/schemas.py
 /schemas/transformers_validators.py
 /test/
+/chalicelib/core/user_testing.py

View file

@@ -1,6 +1,6 @@
 import schemas
 from chalicelib.core import events, metadata, events_ios, \
-    sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas
+    sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes, canvas, user_testing
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
@@ -140,6 +140,12 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
         data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                         context=context, check_existence=False)
         data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
+        if user_testing.has_test_signals(session_id=session_id, project_id=project_id):
+            data['utxVideo'] = user_testing.get_ux_webcam_signed_url(session_id=session_id,
+                                                                     project_id=project_id,
+                                                                     check_existence=False)
+        else:
+            data['utxVideo'] = []
     data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
     data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,

View file

@@ -51,6 +51,7 @@ rm -rf ./chalicelib/core/sessions_mobs.py
 rm -rf ./chalicelib/core/socket_ios.py
 rm -rf ./chalicelib/core/sourcemaps.py
 rm -rf ./chalicelib/core/sourcemaps_parser.py
+rm -rf ./chalicelib/core/user_testing.py
 rm -rf ./chalicelib/saml
 rm -rf ./chalicelib/utils/__init__.py
 rm -rf ./chalicelib/utils/args_transformer.py

View file

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.28.79
+boto3==1.29.7
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 elasticsearch==8.11.0

View file

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.28.79
+boto3==1.29.7
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 elasticsearch==8.11.0

View file

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.29.0
+boto3==1.29.7
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 elasticsearch==8.11.0

View file

@@ -1,5 +1,5 @@
-\set previous_version 'v1.15.0'
-\set next_version 'v1.16.0'
+\set previous_version 'v1.15.0-ee'
+\set next_version 'v1.16.0-ee'
 SELECT openreplay_version() AS current_version,
        openreplay_version() = :'previous_version' AS valid_previous,
        openreplay_version() = :'next_version' AS is_next
@@ -19,6 +19,80 @@ $fn_def$, :'next_version')
 --
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'ui_tests_status') THEN
CREATE TYPE ui_tests_status AS ENUM ('preview', 'in-progress', 'paused', 'closed');
END IF;
END;
$$
LANGUAGE plpgsql;
CREATE TABLE IF NOT EXISTS public.ut_tests
(
test_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
starting_path VARCHAR(255) NULL,
status ui_tests_status NOT NULL,
require_mic BOOLEAN DEFAULT FALSE,
require_camera BOOLEAN DEFAULT FALSE,
description TEXT NULL,
guidelines TEXT NULL,
conclusion_message TEXT NULL,
created_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
updated_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
visibility BOOLEAN DEFAULT FALSE,
created_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
updated_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp without time zone NULL DEFAULT NULL
);
CREATE TABLE IF NOT EXISTS public.ut_tests_tasks
(
task_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
test_id integer NOT NULL REFERENCES ut_tests (test_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
description TEXT NULL,
allow_typing BOOLEAN DEFAULT FALSE
);
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'ut_signal_status') THEN
CREATE TYPE ut_signal_status AS ENUM ('begin', 'done', 'skipped');
END IF;
END;
$$
LANGUAGE plpgsql;
CREATE TABLE IF NOT EXISTS public.ut_tests_signals
(
signal_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
session_id BIGINT NULL REFERENCES public.sessions (session_id) ON DELETE SET NULL,
test_id integer NOT NULL REFERENCES public.ut_tests (test_id) ON DELETE CASCADE,
task_id integer NULL REFERENCES public.ut_tests_tasks (task_id) ON DELETE CASCADE,
status ut_signal_status NOT NULL,
comment TEXT NULL,
timestamp BIGINT NOT NULL,
duration BIGINT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS ut_tests_signals_unique_session_id_test_id_task_id_ts_idx ON public.ut_tests_signals (session_id, test_id, task_id, timestamp);
CREATE INDEX IF NOT EXISTS ut_tests_signals_session_id_idx ON public.ut_tests_signals (session_id);
CREATE TABLE IF NOT EXISTS events.canvas_recordings
(
    session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
@@ -27,6 +101,16 @@ CREATE TABLE IF NOT EXISTS events.canvas_recordings
);
CREATE INDEX IF NOT EXISTS canvas_recordings_session_id_idx ON events.canvas_recordings (session_id);
DROP SCHEMA IF EXISTS backup_v1_10_0 CASCADE;
UPDATE metrics
SET default_config='{
"col": 4,
"row": 2,
"position": 0
}'::jsonb
WHERE metric_type = 'pathAnalysis';
COMMIT;

\elif :is_next
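
Illustrative only: what a single "task done" signal row could look like against the ut_tests_signals schema above, inserted with psycopg2 (the connection string and all values are made up):

import psycopg2

with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
    cur.execute(
        """INSERT INTO public.ut_tests_signals
               (session_id, test_id, task_id, status, comment, timestamp, duration)
           VALUES (%s, %s, %s, %s, %s, %s, %s)""",
        (1001, 1, 2, 'done', 'easy to find', 1700000000000, 5400),
    )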

View file

@@ -9,7 +9,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.15.0-ee'
+SELECT 'v1.16.0-ee'
 $$ LANGUAGE sql IMMUTABLE;
@@ -1001,6 +1001,64 @@ $$
             time BIGINT not null
         );
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'ui_tests_status') THEN
CREATE TYPE ui_tests_status AS ENUM ('preview', 'in-progress', 'paused', 'closed');
END IF;
CREATE TABLE IF NOT EXISTS public.ut_tests
(
test_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
starting_path VARCHAR(255) NULL,
status ui_tests_status NOT NULL,
require_mic BOOLEAN DEFAULT FALSE,
require_camera BOOLEAN DEFAULT FALSE,
description TEXT NULL,
guidelines TEXT NULL,
conclusion_message TEXT NULL,
created_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
updated_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
visibility BOOLEAN DEFAULT FALSE,
created_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
updated_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp without time zone NULL DEFAULT NULL
);
CREATE TABLE IF NOT EXISTS public.ut_tests_tasks
(
task_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
test_id integer NOT NULL REFERENCES ut_tests (test_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
description TEXT NULL,
allow_typing BOOLEAN DEFAULT FALSE
);
IF NOT EXISTS(SELECT *
FROM pg_type typ
WHERE typ.typname = 'ut_signal_status') THEN
CREATE TYPE ut_signal_status AS ENUM ('begin', 'done', 'skipped');
END IF;
CREATE TABLE IF NOT EXISTS public.ut_tests_signals
(
signal_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
session_id BIGINT NULL REFERENCES public.sessions (session_id) ON DELETE SET NULL,
test_id integer NOT NULL REFERENCES public.ut_tests (test_id) ON DELETE CASCADE,
task_id integer NULL REFERENCES public.ut_tests_tasks (task_id) ON DELETE CASCADE,
status ut_signal_status NOT NULL,
comment TEXT NULL,
timestamp BIGINT NOT NULL,
duration BIGINT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS ut_tests_signals_unique_session_id_test_id_task_id_ts_idx ON public.ut_tests_signals (session_id, test_id, task_id, timestamp);
CREATE INDEX IF NOT EXISTS ut_tests_signals_session_id_idx ON public.ut_tests_signals (session_id);
        RAISE NOTICE 'Created missing public schema tables';
    END IF;
END;

View file

@@ -19,6 +19,80 @@ $fn_def$, :'next_version')
 --
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'ui_tests_status') THEN
CREATE TYPE ui_tests_status AS ENUM ('preview', 'in-progress', 'paused', 'closed');
END IF;
END;
$$
LANGUAGE plpgsql;
CREATE TABLE IF NOT EXISTS public.ut_tests
(
test_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
starting_path VARCHAR(255) NULL,
status ui_tests_status NOT NULL,
require_mic BOOLEAN DEFAULT FALSE,
require_camera BOOLEAN DEFAULT FALSE,
description TEXT NULL,
guidelines TEXT NULL,
conclusion_message TEXT NULL,
created_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
updated_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
visibility BOOLEAN DEFAULT FALSE,
created_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
updated_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp without time zone NULL DEFAULT NULL
);
CREATE TABLE IF NOT EXISTS public.ut_tests_tasks
(
task_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
test_id integer NOT NULL REFERENCES ut_tests (test_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
description TEXT NULL,
allow_typing BOOLEAN DEFAULT FALSE
);
DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'ut_signal_status') THEN
CREATE TYPE ut_signal_status AS ENUM ('begin', 'done', 'skipped');
END IF;
END;
$$
LANGUAGE plpgsql;
CREATE TABLE IF NOT EXISTS public.ut_tests_signals
(
signal_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
session_id BIGINT NULL REFERENCES public.sessions (session_id) ON DELETE SET NULL,
test_id integer NOT NULL REFERENCES public.ut_tests (test_id) ON DELETE CASCADE,
task_id integer NULL REFERENCES public.ut_tests_tasks (task_id) ON DELETE CASCADE,
status ut_signal_status NOT NULL,
comment TEXT NULL,
timestamp BIGINT NOT NULL,
duration BIGINT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS ut_tests_signals_unique_session_id_test_id_task_id_ts_idx ON public.ut_tests_signals (session_id, test_id, task_id, timestamp);
CREATE INDEX IF NOT EXISTS ut_tests_signals_session_id_idx ON public.ut_tests_signals (session_id);
CREATE TABLE IF NOT EXISTS events.canvas_recordings
(
    session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
@@ -27,6 +101,16 @@ CREATE TABLE IF NOT EXISTS events.canvas_recordings
);
CREATE INDEX IF NOT EXISTS canvas_recordings_session_id_idx ON events.canvas_recordings (session_id);
DROP SCHEMA IF EXISTS backup_v1_10_0 CASCADE;
UPDATE metrics
SET default_config='{
"col": 4,
"row": 2,
"position": 0
}'::jsonb
WHERE metric_type = 'pathAnalysis';
COMMIT;

\elif :is_next

View file

@@ -9,7 +9,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.15.0'
+SELECT 'v1.16.0'
 $$ LANGUAGE sql IMMUTABLE;
@@ -1116,6 +1116,54 @@ $$
CREATE INDEX swipes_timestamp_idx ON events_ios.swipes (timestamp);
CREATE INDEX swipes_label_session_id_timestamp_idx ON events_ios.swipes (label, session_id, timestamp);
CREATE TYPE ui_tests_status AS ENUM ('preview', 'in-progress', 'paused', 'closed');
CREATE TABLE public.ut_tests
(
test_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
starting_path VARCHAR(255) NULL,
status ui_tests_status NOT NULL,
require_mic BOOLEAN DEFAULT FALSE,
require_camera BOOLEAN DEFAULT FALSE,
description TEXT NULL,
guidelines TEXT NULL,
conclusion_message TEXT NULL,
created_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
updated_by integer REFERENCES public.users (user_id) ON DELETE SET NULL,
visibility BOOLEAN DEFAULT FALSE,
created_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
updated_at timestamp without time zone NOT NULL DEFAULT timezone('utc'::text, now()),
deleted_at timestamp without time zone NULL DEFAULT NULL
);
CREATE TABLE public.ut_tests_tasks
(
task_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
test_id integer NOT NULL REFERENCES ut_tests (test_id) ON DELETE CASCADE,
title VARCHAR(255) NOT NULL,
description TEXT NULL,
allow_typing BOOLEAN DEFAULT FALSE
);
CREATE TYPE ut_signal_status AS ENUM ('begin', 'done', 'skipped');
CREATE TABLE public.ut_tests_signals
(
signal_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
session_id BIGINT NULL REFERENCES public.sessions (session_id) ON DELETE SET NULL,
test_id integer NOT NULL REFERENCES public.ut_tests (test_id) ON DELETE CASCADE,
task_id integer NULL REFERENCES public.ut_tests_tasks (task_id) ON DELETE CASCADE,
status ut_signal_status NOT NULL,
comment TEXT NULL,
timestamp BIGINT NOT NULL,
duration BIGINT NULL
);
CREATE UNIQUE INDEX ut_tests_signals_unique_session_id_test_id_task_id_ts_idx ON public.ut_tests_signals (session_id, test_id, task_id, timestamp);
CREATE INDEX IF NOT EXISTS ut_tests_signals_session_id_idx ON public.ut_tests_signals (session_id);
CREATE TABLE events.canvas_recordings
(
    session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,