* feat(api): usability testing (#1686)
* feat(api): usability testing - wip
* feat(db): usability testing
* feat(api): usability testing - api
* feat(api): usability testing - api
* feat(api): usability testing - db change
* feat(api): usability testing - db change
* feat(api): usability testing - unit tests update
* feat(api): usability testing - test and tasks stats
* feat(api): usability testing - sessions list fix, return zeros if test id is not having signals
* Api v1.16.0 (#1698)
* feat: canvas support [assist] (#1641)
* feat(tracker/ui): start canvas support
* feat(tracker): slpeer -> peerjs for canvas streams
* fix(ui): fix agent canvas peer id
* fix(ui): fix agent canvas peer id
* fix(ui): fix peer removal
* feat(tracker): canvas recorder
* feat(tracker): canvas recorder
* feat(tracker): canvas recorder
* feat(tracker): canvas recorder
* feat(ui): canvas support for ui
* fix(tracker): fix failing tests
* feat(ui): replay canvas in video
* feat(ui): refactor video streaming to draw on canvas
* feat(ui): 10hz check for canvas replay
* feat(ui): fix for tests
* feat(ui): fix for tests
* feat(ui): fix for tests
* feat(ui): fix for tests cov
* feat(ui): more test coverage
* fix(ui): styling
* fix(tracker): support backend settings for canvas
* feat(ui): allow devtools to be resizeable (#1605)
* fix(ui): console redux tab null check
* Api v1.15.0 (#1689)
* fix(chalice): fix create alert with MS Teams notification channel
  closes openreplay/openreplay#1677
* fix(chalice): fix MS Teams notifications
* refactor(chalice): enhanced MS Teams notifications
  closes openreplay/openreplay#1681
  (cherry picked from commit 265897f509)
* fix(ui): filter keys conflict with metadata, path analysis 4 col
* fix(ui): clear the filters and series on card type change
* fix(player): fix msg reader bug
* fix(DB): fix CH wrong version (#1692)
  (cherry picked from commit 48dbbb55db)
* fix(ui): filter keys conflict with metadata
* fix(tracker): unique broadcast channel name
* fix(chalice): fixed delete cards (#1697)
  (cherry picked from commit 92fedd310c)
* fix(tracker): add trycatch to ignore iframe errors
* feat(backend): added ARM arch support to backend services [Dockerfile]
* feat(backend): removed userAgent from sessions and unstarted-sessions tables
* fix(DB): change path-analysis card size
---------
Co-authored-by: Delirium <nikita@openreplay.com>
Co-authored-by: Shekar Siri <sshekarsiri@gmail.com>
Co-authored-by: Alexander <zavorotynskiy@pm.me>
* refactor(chalice): cleaned code (#1699)
* feat(api): usability testing - added start_path to the response, remove count from the list
* feat(api): usability testing - test to have response count and live count
* feat(api): usability testing - test to have additional data
* Revert "refactor(chalice): cleaned code (#1699)" (#1702)
  This reverts commit 83f2b0c12c.
* feat(api): usability testing - responses with total and other improvements
* change(api): vulnerability whitelist update
* feat(api): usability testing - create added missing columns, and sessions with user_id search
* feat(api): usability testing - update test with responseCount
* feat(api): usability testing - timestamps in unix
* feat(api): usability testing - request with proper case change
* feat(api): usability testing - task.description nullable
* feat(api): usability testing - check deleted status
* Api v1.16.0 (#1707)
* fix(chalice): fixed search sessions
* fix(chalice): fixed search sessions
* refactor(chalice): upgraded dependencies
* refactor(crons): upgraded dependencies
* refactor(alerts): upgraded dependencies
* Api v1.16.0 (#1712)
* feat(DB): user-testing support
* feat(chalice): user testing support
* feat(chalice): support utxVideo (#1726)
* feat(chalice): changed bucket name for ux testing webcamera videos
* refactored(chalice): refactored code (#1743)
  refactored(chalice): upgraded dependencies
---------
Co-authored-by: Shekar Siri <sshekarsiri@gmail.com>
Co-authored-by: Delirium <nikita@openreplay.com>
Co-authored-by: Alexander <zavorotynskiy@pm.me>
132 lines · 4 KiB · Python
import logging
import time
from contextlib import asynccontextmanager

import psycopg_pool
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from decouple import config
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from psycopg import AsyncConnection
from starlette.responses import StreamingResponse

from chalicelib.utils import helper
from chalicelib.utils import pg_client
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests

loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)
import orpy
from psycopg.rows import dict_row

class ORPYAsyncConnection(AsyncConnection):
    # psycopg AsyncConnection that always returns rows as dicts (dict_row).

    def __init__(self, *args, **kwargs):
        super().__init__(*args, row_factory=dict_row, **kwargs)
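

# Illustrative sketch, not part of the original module: because ORPYAsyncConnection
# pins row_factory=dict_row, every connection handed out by a pool built on it
# returns rows as dicts. The helper below is hypothetical and is never called here;
# it only shows the usual psycopg_pool usage pattern for such a pool.
async def _example_pooled_query(pool: psycopg_pool.AsyncConnectionPool):
    async with pool.connection() as conn:  # borrow a connection, returned to the pool on exit
        cur = await conn.execute("SELECT 1 AS answer")
        return await cur.fetchone()  # -> {"answer": 1} thanks to dict_row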


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    logging.info(">>>>> starting up <<<<<")
    ap_logger = logging.getLogger('apscheduler')
    ap_logger.setLevel(loglevel)

    app.schedule = AsyncIOScheduler()
    await pg_client.init()
    app.schedule.start()

    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
        app.schedule.add_job(id=job["func"].__name__, **job)
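    # Assumption for illustration: each cron_jobs entry (defined in the crons.core_crons
    # and crons.core_dynamic_crons modules) is a dict of APScheduler add_job() keyword
    # arguments, at least the callable and its trigger, so the call above can unpack it:
    #     {"func": some_async_task, "trigger": "interval", "minutes": 30}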
ap_logger.info(">Scheduled jobs:")
|
|
for job in app.schedule.get_jobs():
|
|
ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
|
|
|
|
database = {
|
|
"host": config("pg_host", default="localhost"),
|
|
"dbname": config("pg_dbname", default="orpy"),
|
|
"user": config("pg_user", default="orpy"),
|
|
"password": config("pg_password", default="orpy"),
|
|
"port": config("pg_port", cast=int, default=5432),
|
|
"application_name": "AIO" + config("APP_NAME", default="PY"),
|
|
}
|
|
|
|
database = psycopg_pool.AsyncConnectionPool(kwargs=database, connection_class=ORPYAsyncConnection)
|
|
orpy.set(orpy.Application(
|
|
database,
|
|
))
|
|
|
|
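    # (Assumption) orpy.Application / orpy.set appear to stash the pool in a process-wide
    # holder so that other modules can reach it without importing this file; the orpy
    # module itself is not shown here.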

    # App listening
    yield

    # Shutdown
    await database.close()
    logging.info(">>>>> shutting down <<<<<")
    app.schedule.shutdown(wait=False)
    await pg_client.terminate()


app = FastAPI(root_path=config("root_path", default="/api"), docs_url=config("docs_url", default=""),
              redoc_url=config("redoc_url", default=""), lifespan=lifespan)
app.add_middleware(GZipMiddleware, minimum_size=1000)


@app.middleware('http')
async def or_middleware(request: Request, call_next):
    # Time every request when helper.TRACK_TIME is enabled and log failures / non-2xx responses.
    if helper.TRACK_TIME:
        now = time.time()
    try:
        response: StreamingResponse = await call_next(request)
    except:
        logging.error(f"{request.method}: {request.url.path} FAILED!")
        raise
    if response.status_code // 100 != 2:
        logging.warning(f"{request.method}:{request.url.path} {response.status_code}!")
    if helper.TRACK_TIME:
        now = time.time() - now
        if now > 2:
            now = round(now, 2)
            logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
    return response
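

# For illustration with hypothetical values: a GET /projects request that returns 404
# would be logged by the middleware above as
#     GET:/projects 404!
# and, when helper.TRACK_TIME is on and the call took 3.21 s, additionally as
#     Execution time: 3.21 s for GET: /projects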


origins = [
    "*",
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.include_router(core.public_app)
app.include_router(core.app)
app.include_router(core.app_apikey)
app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(metrics.app)
app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
app.include_router(health.public_app)
app.include_router(health.app)
app.include_router(health.app_apikey)

app.include_router(usability_tests.public_app)
app.include_router(usability_tests.app)
app.include_router(usability_tests.app_apikey)

# @app.get('/private/shutdown', tags=["private"])
# async def stop_server():
#     logging.info("Requested shutdown")
#     await shutdown()
#     import os, signal
#     os.kill(1, signal.SIGTERM)
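
# Local run sketch (assumption: this module is saved as app.py and the usual env vars
# such as pg_host, pg_user, pg_password and LOGLEVEL are exported):
#     uvicorn app:app --host 0.0.0.0 --port 8000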