openreplay/api/chalicelib/core/db_request_handler.py

import logging

from chalicelib.utils import pg_client


class DatabaseRequestHandler:
    """Per-table SQL builder/executor for PostgreSQL, backed by pg_client."""

    def __init__(self, table_name):
        self.table_name = table_name
        self.constraints = []
        self.params = {}
        self.order_clause = ""
        self.sort_clause = ""
        self.select_columns = []
        self.sub_queries = []
        self.joins = []
        self.group_by_clause = ""
        self.client = pg_client
        self.logger = logging.getLogger(__name__)
        self.pagination = {}
    def add_constraint(self, constraint, param=None):
        self.constraints.append(constraint)
        if param:
            self.params.update(param)

    def add_subquery(self, subquery, alias, param=None):
        self.sub_queries.append((subquery, alias))
        if param:
            self.params.update(param)

    def add_join(self, join_clause):
        self.joins.append(join_clause)

    def add_param(self, key, value):
        self.params[key] = value

    def set_order_by(self, order_by):
        self.order_clause = order_by

    def set_sort_by(self, sort_by):
        self.sort_clause = sort_by

    def set_select_columns(self, columns):
        self.select_columns = columns

    def set_group_by(self, group_by_clause):
        self.group_by_clause = group_by_clause

    def set_pagination(self, page, page_size):
        """
        Set pagination parameters for the query.

        :param page: The page number (1-indexed)
        :param page_size: Number of items per page
        """
        self.pagination = {
            'offset': (page - 1) * page_size,
            'limit': page_size
        }
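    # e.g. set_pagination(page=3, page_size=20) stores {'offset': 40, 'limit': 20}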
    def build_query(self, action="select", additional_clauses=None, data=None):
        if action == "select":
            query = f"SELECT {', '.join(self.select_columns)} FROM {self.table_name}"
        elif action == "insert":
            columns = ', '.join(data.keys())
            placeholders = ', '.join(f'%({k})s' for k in data.keys())
            query = f"INSERT INTO {self.table_name} ({columns}) VALUES ({placeholders})"
        elif action == "update":
            set_clause = ', '.join(f"{k} = %({k})s" for k in data.keys())
            query = f"UPDATE {self.table_name} SET {set_clause}"
        elif action == "delete":
            query = f"DELETE FROM {self.table_name}"
        else:
            raise ValueError(f"Unsupported action: {action}")

        for join in self.joins:
            query += f" {join}"

        for subquery, alias in self.sub_queries:
            query += f", ({subquery}) AS {alias}"

        if self.constraints:
            query += " WHERE " + " AND ".join(self.constraints)

        if action == "select":
            if self.group_by_clause:
                query += " GROUP BY " + self.group_by_clause
            if self.sort_clause:
                query += " ORDER BY " + self.sort_clause
            if self.order_clause:
                query += " " + self.order_clause
            if self.pagination:
                query += " LIMIT %(limit)s OFFSET %(offset)s"
                self.params.update(self.pagination)

        if additional_clauses:
            query += " " + additional_clauses

        self.logger.info("Query: %s", query)
        return query
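    # Example with hypothetical inputs (this class assumes no particular table):
    # for table "users" with select_columns=["user_id", "email"], a constraint
    # "project_id = %(project_id)s", and set_pagination(2, 10), build_query() yields
    #   SELECT user_id, email FROM users WHERE project_id = %(project_id)s
    #   LIMIT %(limit)s OFFSET %(offset)s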
    def execute_query(self, query, data=None):
        try:
            with self.client.PostgresClient() as cur:
                # mogrify interpolates the parameters client-side, so the
                # statement handed to execute() is already fully escaped
                mogrified_query = cur.mogrify(query, {**data, **self.params} if data else self.params)
                cur.execute(mogrified_query)
                return cur.fetchall() if cur.description else None
        except Exception as e:
            self.logger.error(f"Database operation failed: {e}")
            raise
    def fetchall(self):
        query = self.build_query()
        return self.execute_query(query)

    def fetchone(self):
        query = self.build_query()
        result = self.execute_query(query)
        return result[0] if result else None

    def insert(self, data):
        query = self.build_query(action="insert", data=data)
        query += " RETURNING *;"
        result = self.execute_query(query, data)
        return result[0] if result else None

    def update(self, data):
        query = self.build_query(action="update", data=data)
        query += " RETURNING *;"
        result = self.execute_query(query, data)
        return result[0] if result else None

    def delete(self):
        # note: with no constraints added, this deletes every row in the table
        query = self.build_query(action="delete")
        return self.execute_query(query)
    def batch_insert(self, items):
        if not items:
            return None
        columns = ', '.join(items[0].keys())
        # Building a values string with unique parameter names for each item
        all_values_query = ', '.join(
            '(' + ', '.join(f"%({key}_{i})s" for key in item) + ')'
            for i, item in enumerate(items)
        )
        query = f"INSERT INTO {self.table_name} ({columns}) VALUES {all_values_query} RETURNING *;"
        try:
            with self.client.PostgresClient() as cur:
                # Flatten items into a single dictionary with unique keys
                combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()}
                mogrified_query = cur.mogrify(query, combined_params)
                cur.execute(mogrified_query)
                return cur.fetchall()
        except Exception as e:
            self.logger.error(f"Database batch insert operation failed: {e}")
            raise
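    # e.g. two items with keys (name, email) render
    #   VALUES (%(name_0)s, %(email_0)s), (%(name_1)s, %(email_1)s)
    # with combined_params {'name_0': ..., 'email_0': ..., 'name_1': ..., 'email_1': ...}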
    def raw_query(self, query, params=None):
        try:
            with self.client.PostgresClient() as cur:
                mogrified_query = cur.mogrify(query, params)
                cur.execute(mogrified_query)
                return cur.fetchall() if cur.description else None
        except Exception as e:
            self.logger.error(f"Database operation failed: {e}")
            raise
    def batch_update(self, items):
        if not items:
            return None
        # The first key of each item is treated as the id column
        id_column = list(items[0])[0]
        # Building the set clause for the update statement
        update_columns = list(items[0].keys())
        update_columns.remove(id_column)
        set_clause = ', '.join(f"{col} = v.{col}" for col in update_columns)
        # Building the values rows with unique parameter names per item, mirroring
        # batch_insert (reusing the same names for every row would make each row
        # render with the values of the last item)
        values_rows = []
        for i, item in enumerate(items):
            values = ', '.join(f"%({key}_{i})s" for key in item.keys())
            values_rows.append(f"({values})")
        values_query = ', '.join(values_rows)
        # Constructing the full update query
        query = f"""
            UPDATE {self.table_name} AS t
            SET {set_clause}
            FROM (VALUES {values_query}) AS v ({', '.join(items[0].keys())})
            WHERE t.{id_column} = v.{id_column};
        """
        try:
            with self.client.PostgresClient() as cur:
                # Flatten items into a single dictionary with unique keys for mogrify
                combined_params = {f"{k}_{i}": v for i, item in enumerate(items) for k, v in item.items()}
                mogrified_query = cur.mogrify(query, combined_params)
                cur.execute(mogrified_query)
        except Exception as e:
            self.logger.error(f"Database batch update operation failed: {e}")
            raise
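

if __name__ == "__main__":
    # Usage sketch, not part of the original module: the "users" table, its
    # columns, and the tenant filter are hypothetical and only illustrate the
    # builder's API. build_query() is safe to call without a database; the
    # fetch/insert helpers need the Postgres connection pg_client is configured with.
    handler = DatabaseRequestHandler("users")
    handler.set_select_columns(["user_id", "name"])
    handler.add_constraint("tenant_id = %(tenant_id)s", {"tenant_id": 42})
    handler.set_sort_by("created_at DESC")
    handler.set_pagination(page=1, page_size=25)
    print(handler.build_query())  # inspect the generated SQL without executing it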