Merge branch 'dev' into live-se-red

nick-delirium 2024-12-20 10:56:58 +01:00
commit fbf7d716a6
No known key found for this signature in database
GPG key ID: 93ABD695DF5FDBA0
53 changed files with 2372 additions and 698 deletions

@ -1,3 +1,4 @@
import logging
from os import access, R_OK
from os.path import exists as path_exists, getsize
@ -10,6 +11,8 @@ import schemas
from chalicelib.core import projects
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
ASSIST_KEY = config("ASSIST_KEY")
ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
@ -52,21 +55,21 @@ def __get_live_sessions_ws(project_id, data):
results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
logger.error(results.text)
return {"total": 0, "sessions": []}
live_peers = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
live_peers = {"total": 0, "sessions": []}
except Exception as e:
print("!! Issue getting Live-Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Live-Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
live_peers = {"total": 0, "sessions": []}
_live_peers = live_peers
if "sessions" in live_peers:
@ -102,8 +105,8 @@ def get_live_session_by_id(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
logger.error(results.text)
return None
results = results.json().get("data")
if results is None:
@ -111,16 +114,16 @@ def get_live_session_by_id(project_id, session_id):
results["live"] = True
results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id)
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return None
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return None
return results
@ -132,21 +135,21 @@ def is_live(project_id, session_id, project_key=None):
results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for is_live")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for is_live")
logger.error(results.text)
return False
results = results.json().get("data")
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return False
return str(session_id) == results
@ -161,21 +164,21 @@ def autocomplete(project_id, q: str, key: str = None):
ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for autocomplete")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for autocomplete")
logger.error(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return {"errors": ["Something went wrong wile calling assist"]}
for r in results:
r["type"] = __change_keys(r["type"])
@ -239,24 +242,24 @@ def session_exists(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for session_exists")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for session_exists")
logger.error(results.text)
return None
results = results.json().get("data")
if results is None:
return False
return True
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return False
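These hunks swap ad-hoc print() diagnostics for the module-level logger declared at the top of the file. A minimal sketch of the pattern the file converges on; the helper name below is illustrative, not from the repo:

import logging

import requests

logger = logging.getLogger(__name__)

def call_peer_server(url, payload, timeout=5):
    # Hypothetical helper mirroring assist.py's error handling:
    # non-200 responses and timeouts are logged at ERROR,
    # unexpected exceptions keep their traceback via logger.exception().
    try:
        results = requests.post(url, json=payload, timeout=timeout)
        if results.status_code != 200:
            logger.error(f"!! issue with the peer-server code:{results.status_code}")
            logger.error(results.text)
            return {"total": 0, "sessions": []}
        return results.json().get("data", [])
    except requests.exceptions.Timeout:
        logger.error("!! Timeout getting Assist response")
        return {"total": 0, "sessions": []}
    except Exception as e:
        logger.error("!! Issue getting Live-Assist response")
        logger.exception(e)
        return {"total": 0, "sessions": []}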

@ -59,10 +59,10 @@ def __get_autocomplete_table(value, project_id):
"c_list": tuple(c_list)}
results = []
try:
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
except Exception as err:
logger.exception("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
logger.exception(cur.format(query=query, params=params))
logger.exception(cur.format(query=query, parameters=params))
logger.exception("--------- PARAMS -----------")
logger.exception(params)
logger.exception("--------- VALUE -----------")
@ -119,7 +119,7 @@ def __generic_autocomplete(event: Event):
query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
return helper.list_to_camel_case(results)
return f
@ -137,7 +137,7 @@ def __generic_autocomplete_metas(typename):
return []
query = __generic_query(typename, value_length=len(text))
rows = cur.execute(query=query, params=params)
rows = cur.execute(query=query, parameters=params)
return rows
return f
@ -335,5 +335,5 @@ def get_top_values(project_id, event_type, event_key=None):
SELECT c_value AS value, row_count, truncate(row_count * 100 / total_count,2) AS row_percentage
FROM raw;"""
params = {"project_id": project_id}
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
return helper.list_to_camel_case(results)
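These hunks rename the keyword used to bind query values on the ClickHouse cursor from params= to parameters=. A hedged sketch of a call site under that assumption; the wrapper class name and the table are assumptions, not taken verbatim from this diff:

from chalicelib.utils import ch_client

query = """SELECT c_value AS value, count(*) AS row_count
           FROM autocomplete
           WHERE project_id = %(project_id)s
           GROUP BY c_value;"""
params = {"project_id": 42}

with ch_client.ClickHouseClient() as cur:  # wrapper class name assumed
    rows = cur.execute(query=query, parameters=params)  # parameters= replaces the old params= keyword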

@ -1,12 +1,11 @@
from decouple import config
import schemas
from . import errors as errors_legacy
from chalicelib.core import metrics, metadata
from chalicelib.core import metadata
from chalicelib.core import sessions
from chalicelib.core.metrics import metrics
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from . import errors as errors_legacy
def _multiple_values(values, value_key="value"):
@ -290,7 +289,7 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------")
# print(ch.format(main_ch_query, params))
# print("--------------------")
row = ch.execute(query=main_ch_query, params=params)
row = ch.execute(query=main_ch_query, parameters=params)
if len(row) == 0:
return {"errors": ["error not found"]}
row = row[0]
@ -309,7 +308,7 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------")
# print(ch.format(query, params))
# print("--------------------")
status = ch.execute(query=query, params=params)
status = ch.execute(query=query, parameters=params)
if status is not None:
status = status[0]
@ -650,7 +649,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
# print(ch.format(main_ch_query, params))
# print("------------")
rows = ch.execute(query=main_ch_query, params=params)
rows = ch.execute(query=main_ch_query, parameters=params)
total = rows[0]["total"] if len(rows) > 0 else 0
for r in rows:

@ -1,6 +1,10 @@
import logging
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
logger = logging.getLogger(__name__)
class Actions:
@ -150,23 +154,23 @@ def get_scheduled_jobs():
def execute_jobs():
jobs = get_scheduled_jobs()
for job in jobs:
print(f"Executing jobId:{job['jobId']}")
logger.info(f"Executing jobId:{job['jobId']}")
try:
if job["action"] == Actions.DELETE_USER_DATA:
session_ids = __get_session_ids_by_user_ids(project_id=job["projectId"],
user_ids=[job["referenceId"]])
if len(session_ids) > 0:
print(f"Deleting {len(session_ids)} sessions")
logger.info(f"Deleting {len(session_ids)} sessions")
__delete_sessions_by_session_ids(session_ids=session_ids)
__delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
else:
raise Exception(f"The action '{job['action']}' not supported.")
job["status"] = JobStatus.COMPLETED
print(f"Job completed {job['jobId']}")
logger.info(f"Job completed {job['jobId']}")
except Exception as e:
job["status"] = JobStatus.FAILED
job["errors"] = str(e)
print(f"Job failed {job['jobId']}")
logger.error(f"Job failed {job['jobId']}")
update(job["jobId"], job)

@ -4,7 +4,8 @@ import logging
from fastapi import HTTPException, status
import schemas
from chalicelib.core import errors, issues
from chalicelib.core import issues
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined
from chalicelib.core.sessions import sessions
from chalicelib.utils import helper, pg_client

@ -1,6 +1,7 @@
import json
from typing import Optional, List
import logging
from collections import Counter
from typing import Optional, List
from fastapi import HTTPException, status
@ -9,6 +10,8 @@ from chalicelib.core import users
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
@ -410,7 +413,7 @@ def update_project_conditions(project_id, conditions):
create_project_conditions(project_id, to_be_created)
if to_be_updated:
print(to_be_updated)
logger.debug(to_be_updated)
update_project_condition(project_id, to_be_updated)
return get_conditions(project_id)

@ -468,8 +468,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
if len(data.filters) > 0:
meta_keys = None
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"

@ -59,31 +59,6 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
"""
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.IS_UNDEFINED]
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
@ -508,7 +483,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
data.filters.append(
schemas.SessionSearchFilterSchema(value=[issue['type']],
type=schemas.FilterType.ISSUE.value,
operator='is')
operator=schemas.SearchEventOperator.IS.value)
)
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
@ -541,16 +516,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
# to reduce include a sub-query of sessions inside events query, in order to reduce the selected data
include_in_events = False
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, f_k: f.value, **_multiple_values(f.value, value_key=f_k)}
full_args = {**full_args, f_k: f.value, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
@ -562,9 +535,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_browser)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
if is_any:
@ -572,9 +546,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_os)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
if is_any:
@ -582,9 +556,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_device)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
if is_any:
@ -592,9 +566,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_country)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in schemas.FilterType.USER_CITY:
if is_any:
@ -602,9 +577,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_city)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in schemas.FilterType.USER_STATE:
if is_any:
@ -612,9 +587,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_state)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
@ -625,11 +600,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_source)')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
extra_constraints.append('isNotNull(s.utm_medium)')
@ -639,11 +614,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_medium')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
extra_constraints.append('isNotNull(s.utm_campaign)')
@ -653,11 +628,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_campaign)')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
@ -674,11 +649,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.base_referrer)')
else:
extra_constraints.append(
_multiple_conditions(f"s.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
@ -693,11 +668,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(f"isNull(ms.{metadata.index_to_colname(meta_keys[f.source])})")
else:
extra_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
@ -709,11 +684,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.user_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
@ -724,11 +699,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.user_anonymous_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.rev_id)')
@ -738,19 +713,19 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.rev_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.PLATFORM:
# op = sh.get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.ISSUE:
if is_any:
extra_constraints.append("notEmpty(s.issue_types)")
@ -760,21 +735,21 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
issues.append(f)
extra_constraints.append(f"hasAny(s.issue_types,%({f_k})s)")
# _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
# sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
# value_key=f_k))
ss_constraints.append(f"hasAny(ms.issue_types,%({f_k})s)")
# _multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
# sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
# value_key=f_k))
if is_not:
extra_constraints[-1] = f"not({extra_constraints[-1]})"
ss_constraints[-1] = f"not({ss_constraints[-1]})"
elif filter_type == schemas.FilterType.EVENTS_COUNT:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
else:
continue
include_in_events = True
@ -788,7 +763,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
@ -800,7 +775,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
or_events = data.events_order == schemas.SearchEventOrder.OR
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not __is_valid_event(is_any=is_any, event=event):
@ -822,8 +797,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -835,19 +810,19 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if not is_any:
if schemas.ClickEventExtraOperator.has_value(event.operator):
event_where.append(
_multiple_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.CLICK_MOBILE.column
@ -856,15 +831,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT.ui_type:
@ -876,20 +851,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
else:
_column = events.EventType.INPUT_MOBILE.column
event_where.append(
@ -897,15 +872,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.LOCATION.ui_type:
@ -917,15 +892,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.VIEW_MOBILE.column
@ -934,15 +909,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -951,14 +926,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -967,14 +942,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.STATEACTION.ui_type:
@ -984,14 +959,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
# TODO: isNot for ERROR
elif event_type == events.EventType.ERROR.ui_type:
@ -1003,12 +978,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = []
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
@ -1021,14 +996,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
_column = events.EventType.INPUT_MOBILE.column
@ -1036,14 +1011,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
_column = events.EventType.VIEW_MOBILE.column
@ -1051,14 +1026,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
_column = events.EventType.CUSTOM_MOBILE.column
@ -1066,14 +1041,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -1082,14 +1057,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
_column = events.EventType.CRASH_MOBILE.column
@ -1097,14 +1072,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
_column = events.EventType.SWIPE_MOBILE.column
@ -1112,14 +1087,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
@ -1130,14 +1105,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = []
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
col = performance_event.get_col(event_type)
colname = col["column"]
@ -1157,15 +1132,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
tname = "main"
if not is_any:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# TODO: isNot for PerformanceEvent
@ -1180,15 +1155,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
tname = "main"
if not is_any:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
@ -1199,44 +1174,44 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
apply = False
events_conditions[-1]["condition"] = []
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.url_path {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(
_multiple_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
else:
@ -1252,29 +1227,29 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
events_conditions[-1]["condition"] = []
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
_multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
else:
logging.warning(f"undefined GRAPHQL filter: {f.type}")
@ -1464,9 +1439,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
for i, f in enumerate(issues):
f_k_v = f"f_issue_v{i}"
f_k_s = f_k_v + "_source"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k_v), f_k_s: f.source}
issues_conditions.append(_multiple_conditions(f"issues.type=%({f_k_v})s", f.value,
value_key=f_k_v))
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k_v), f_k_s: f.source}
issues_conditions.append(sh.multi_conditions(f"issues.type=%({f_k_v})s", f.value,
value_key=f_k_v))
issues_conditions[-1] = f"({issues_conditions[-1]} AND issues.context_string=%({f_k_s})s)"
extra_join = f"""INNER JOIN (SELECT DISTINCT events.session_id
FROM experimental.issues
@ -1483,17 +1458,17 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = []
for i, c in enumerate(extra_conditions):
if _isAny_opreator(c.operator):
if sh.isAny_opreator(c.operator):
continue
e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**_multiple_values(c.value, value_key=e_k)}
**sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
_extra_or_condition.append(
_multiple_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
else:
logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
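Throughout this file the local _multiple_conditions/_multiple_values/_isAny_opreator/_isUndefined_operator helpers (removed at the top of the diff) are replaced by the shared sh.multi_conditions/sh.multi_values/sh.isAny_opreator/sh.isUndefined_operator. Judging from the deleted bodies, the shared versions behave roughly like this sketch (signatures assumed to match the removed code):

def multi_values(values, value_key="value"):
    # Expand a list of values into numbered bind parameters: value_0, value_1, ...
    query_values = {}
    if values is not None and isinstance(values, list):
        for i, v in enumerate(values):
            query_values[f"{value_key}_{i}"] = v
    return query_values

def multi_conditions(condition, values, value_key="value", is_not=False):
    # Repeat a condition template once per value; join with OR (AND when negated).
    query = []
    for i in range(len(values)):
        k = f"{value_key}_{i}"
        query.append(condition.replace(value_key, k))
    return "(" + (" AND " if is_not else " OR ").join(query) + ")"

# Example:
# multi_conditions("s.user_os = %(f_value0)s", ["Mac", "Linux"], value_key="f_value0")
# -> "(s.user_os = %(f_value0_0)s OR s.user_os = %(f_value0_1)s)"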

@ -1,7 +1,11 @@
import logging
from chalicelib.utils import pg_client, helper, email_helper, smtp
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import get_issue_title
logger = logging.getLogger(__name__)
LOWEST_BAR_VALUE = 3
@ -30,7 +34,7 @@ def edit_config(user_id, weekly_report):
def cron():
if not smtp.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
logger.info("!!! No SMTP configuration found, ignoring weekly report")
return
_now = TimeUTC.now()
with pg_client.PostgresClient(unlimited_query=True) as cur:
@ -88,17 +92,17 @@ def cron():
) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report query: {_now2 - _now} ms")
logger.debug(f">> Weekly report query: {_now2 - _now} ms")
_now = _now2
emails_to_send = []
for p in projects_data:
params["project_id"] = p["project_id"]
print(f"checking {p['project_name']} : {p['project_id']}")
logger.debug(f"checking {p['project_name']} : {p['project_id']}")
if len(p["emails"]) == 0 \
or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0:
print('ignore')
logger.debug('ignore')
continue
print("valid")
logger.debug("valid")
p["past_week_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
p["past_month_issues_evolution"] = helper.__decimal_limit(
@ -121,7 +125,7 @@ def cron():
ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
max_days_partition = max(x['issues_count'] for x in days_partition)
for d in days_partition:
@ -140,7 +144,7 @@ def cron():
LIMIT 4;""", params))
issues_by_type = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
max_issues_by_type = sum(i["count"] for i in issues_by_type)
for i in issues_by_type:
@ -172,7 +176,7 @@ def cron():
ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
for i in issues_breakdown_by_day:
i["sum"] = sum(x["count"] for x in i["partition"])
@ -221,7 +225,7 @@ def cron():
ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
if len(issues_breakdown_list) > 4:
others = {"type": "Others",
@ -255,6 +259,6 @@ def cron():
"issues_breakdown_by_day": issues_breakdown_by_day,
"issues_breakdown_list": issues_breakdown_list
}})
print(f">>> Sending weekly report to {len(emails_to_send)} email-group")
logger.info(f">>> Sending weekly report to {len(emails_to_send)} email-group")
for e in emails_to_send:
email_helper.weekly_report2(recipients=e["email"], data=e["data"])
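The cron job's progress prints become logger.debug (per-project timing lines) and logger.info (SMTP warning and final summary). Under a default logging configuration the debug lines are suppressed; a minimal sketch of turning them back on when running the report locally (assumption, not repo configuration):

import logging

logging.basicConfig(level=logging.DEBUG,
                    format="%(asctime)s %(levelname)s %(name)s: %(message)s")
# With DEBUG enabled, lines such as ">> Weekly report query: ... ms" show up again.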

@ -16,6 +16,16 @@ def get_main_events_table(timestamp=0, platform="web"):
def get_main_sessions_table(timestamp=0):
return "experimental.sessions"
def get_user_favorite_sessions_table(timestamp=0):
return "experimental.user_favorite_sessions"
def get_user_viewed_sessions_table(timestamp=0):
return "experimental.user_viewed_sessions"
def get_user_viewed_errors_table(timestamp=0):
return "experimental.user_viewed_errors"
def get_main_js_errors_sessions_table(timestamp=0):
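The new getters return fully qualified table names in the experimental (ClickHouse) schema. A hedged example of how such a getter is typically interpolated into a query; the query itself is illustrative, not from this diff:

from chalicelib.utils import exp_ch_helper

table = exp_ch_helper.get_user_viewed_sessions_table()  # "experimental.user_viewed_sessions"
query = f"""SELECT session_id
            FROM {table}
            WHERE project_id = %(project_id)s
              AND user_id = %(user_id)s;"""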

@ -1,5 +1,5 @@
from typing import Union
from enum import Enum
import schemas
@ -49,7 +49,7 @@ def multi_values(values, value_key="value"):
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i]
return query_values

View file

@ -8,8 +8,9 @@ from chalicelib.core import sourcemaps, events, projects, alerts, issues, \
metadata, reset_password, \
log_tools, sessions, announcements, \
weekly_report, assist, mobile, tenants, boarding, \
notifications, webhook, users, saved_search, tags, autocomplete
notifications, webhook, users, saved_search, tags
from chalicelib.core.metrics import custom_metrics
from chalicelib.core.autocomplete import autocomplete
from chalicelib.core.issue_tracking import github, integrations_global, integrations_manager, \
jira_cloud
from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \

View file

@ -2,6 +2,7 @@ package main
import (
"context"
analyticsConfig "openreplay/backend/internal/config/analytics"
"openreplay/backend/pkg/analytics"
"openreplay/backend/pkg/db/postgres/pool"

View file

@ -9,35 +9,37 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.2.0
github.com/DataDog/datadog-api-client-go/v2 v2.30.0
github.com/Masterminds/semver v1.5.0
github.com/andybalholm/brotli v1.1.0
github.com/aws/aws-sdk-go v1.44.98
github.com/andybalholm/brotli v1.1.1
github.com/aws/aws-sdk-go v1.44.334
github.com/btcsuite/btcutil v1.0.2
github.com/confluentinc/confluent-kafka-go/v2 v2.4.0
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/docker/distribution v2.8.3+incompatible
github.com/elastic/go-elasticsearch/v7 v7.13.1
github.com/elastic/go-elasticsearch/v8 v8.13.0
github.com/getsentry/sentry-go v0.29.0
github.com/elastic/go-elasticsearch/v8 v8.13.1
github.com/fernet/fernet-go v0.0.0-20240119011108-303da6aec611
github.com/getsentry/sentry-go v0.29.1
github.com/go-playground/validator/v10 v10.23.0
github.com/go-redis/redis v6.15.9+incompatible
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/google/uuid v1.6.0
github.com/gorilla/mux v1.8.1
github.com/jackc/pgconn v1.14.3
github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451
github.com/jackc/pgtype v1.14.0
github.com/jackc/pgx/v4 v4.18.2
github.com/klauspost/compress v1.17.7
github.com/klauspost/pgzip v1.2.5
github.com/lib/pq v1.10.2
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438
github.com/jackc/pgtype v1.14.4
github.com/jackc/pgx/v4 v4.18.3
github.com/klauspost/compress v1.17.11
github.com/klauspost/pgzip v1.2.6
github.com/lib/pq v1.10.9
github.com/oschwald/maxminddb-golang v1.7.0
github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.16.0
github.com/rs/xid v1.2.1
github.com/sethvargo/go-envconfig v0.7.0
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe
github.com/ua-parser/uap-go v0.0.0-20241012191800-bbb40edc15aa
go.uber.org/zap v1.17.0
golang.org/x/net v0.23.0
golang.org/x/net v0.33.0
google.golang.org/api v0.169.0
)
@ -51,33 +53,36 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0 // indirect
github.com/DataDog/zstd v1.5.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/elastic/elastic-transport-go/v8 v8.5.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.23.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.2 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
github.com/googleapis/gax-go/v2 v2.12.3 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.3 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/puddle v1.3.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.0 // indirect
github.com/paulmach/orb v0.7.1 // indirect
github.com/pierrec/lz4/v4 v4.1.15 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/prometheus/client_model v0.4.0 // indirect
github.com/prometheus/common v0.42.0 // indirect
github.com/prometheus/procfs v0.10.1 // indirect
@ -89,20 +94,21 @@ require (
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/sdk v1.22.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/oauth2 v0.17.0 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 // indirect
google.golang.org/grpc v1.62.1 // indirect
google.golang.org/protobuf v1.33.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20241216192217-9240e9c98484 // indirect
google.golang.org/grpc v1.62.2 // indirect
google.golang.org/protobuf v1.36.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)

View file

@ -1,18 +1,10 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM=
cloud.google.com/go v0.112.1/go.mod h1:+Vbu+Y1UU+I1rjmzeMOb/8RfkKJK2Gyxi1X6jJCZLo4=
cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU=
cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls=
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
cloud.google.com/go/iam v1.1.7 h1:z4VHOhwKLF/+UYXAJDFwGtNF0b6gjsW1Pk9Ml0U/IoM=
cloud.google.com/go/iam v1.1.7/go.mod h1:J4PMPg8TtyurAUvSmPj8FF3EDgY1SPRZxcUGrn7WXGA=
cloud.google.com/go/logging v1.9.0 h1:iEIOXFO9EmSiTjDmfpbRjOxECO7R8C7b8IXUGOj7xZw=
cloud.google.com/go/logging v1.9.0/go.mod h1:1Io0vnZv4onoUnsVUQY3HZ3Igb1nBchky0A0y7BBBhE=
cloud.google.com/go/longrunning v0.5.6 h1:xAe8+0YaWoCKr9t1+aWe+OeQgN/iJK1fEgZSXmjuEaE=
cloud.google.com/go/longrunning v0.5.6/go.mod h1:vUaDrWYOMKRuhiv6JBnn49YxCPz2Ayn9GqyjaBT8/mA=
cloud.google.com/go/storage v1.38.0 h1:Az68ZRGlnNTpIBbLjSMIV2BDcwwXYlRlQzis0llkpJg=
cloud.google.com/go/storage v1.38.0/go.mod h1:tlUADB0mAb9BgYls9lq+8MGkfzOXuLrnHXlpHmvFJoY=
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
@ -50,10 +42,10 @@ github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7
github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo=
github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/aws/aws-sdk-go v1.44.334 h1:h2bdbGb//fez6Sv6PaYv868s9liDeoYM6hYsAqTB4MU=
github.com/aws/aws-sdk-go v1.44.334/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go-v2 v1.17.6 h1:Y773UK7OBqhzi5VDXMi1zVGsoj+CVHs2eaC2bDsLwi0=
github.com/aws/aws-sdk-go-v2 v1.17.6/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
github.com/aws/aws-sdk-go-v2/config v1.18.16 h1:4r7gsCu8Ekwl5iJGE/GmspA2UifqySCCkyyyPFeWs3w=
@ -156,8 +148,8 @@ github.com/elastic/elastic-transport-go/v8 v8.5.0 h1:v5membAl7lvQgBTexPRDBO/Rdnl
github.com/elastic/elastic-transport-go/v8 v8.5.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
github.com/elastic/go-elasticsearch/v7 v7.13.1 h1:PaM3V69wPlnwR+ne50rSKKn0RNDYnnOFQcuGEI0ce80=
github.com/elastic/go-elasticsearch/v7 v7.13.1/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
github.com/elastic/go-elasticsearch/v8 v8.13.0 h1:YXPAWpvbYX0mWSNG9tnEpvs4h1stgMy5JUeKZECYYB8=
github.com/elastic/go-elasticsearch/v8 v8.13.0/go.mod h1:DIn7HopJs4oZC/w0WoJR13uMUxtHeq92eI5bqv5CRfI=
github.com/elastic/go-elasticsearch/v8 v8.13.1 h1:du5F8IzUUyCkzxyHdrO9AtopcG95I/qwi2WK8Kf1xlg=
github.com/elastic/go-elasticsearch/v8 v8.13.1/go.mod h1:DIn7HopJs4oZC/w0WoJR13uMUxtHeq92eI5bqv5CRfI=
github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ=
github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@ -166,6 +158,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fernet/fernet-go v0.0.0-20240119011108-303da6aec611 h1:JwYtKJ/DVEoIA5dH45OEU7uoryZY/gjd/BQiwwAOImM=
github.com/fernet/fernet-go v0.0.0-20240119011108-303da6aec611/go.mod h1:zHMNeYgqrTpKyjawjitDg0Osd1P/FmeA0SZLYK3RfLQ=
github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw=
github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@ -173,16 +167,16 @@ github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM4
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/getsentry/sentry-go v0.29.0 h1:YtWluuCFg9OfcqnaujpY918N/AhCCwarIDWOYSBAjCA=
github.com/getsentry/sentry-go v0.29.0/go.mod h1:jhPesDAL0Q0W2+2YEuVOvdWmVtdsr1+jtBrlDEVWwLY=
github.com/getsentry/sentry-go v0.29.1 h1:DyZuChN8Hz3ARxGVV8ePaNXh1dQ7d76AiB117xcREwA=
github.com/getsentry/sentry-go v0.29.1/go.mod h1:x3AtIzN01d6SiWkderzaH28Tm0lgkafpJ5Bm3li39O0=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
@ -194,6 +188,8 @@ github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZ
github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng=
github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
@ -221,8 +217,7 @@ github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17w
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@ -252,7 +247,6 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
@ -260,10 +254,8 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA=
github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4=
github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
@ -280,6 +272,8 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
@ -300,8 +294,8 @@ github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w=
github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM=
github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451 h1:WAvSpGf7MsFuzAtK4Vk7R4EVe+liW4x83r4oWu0WHKw=
github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0=
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
@ -320,20 +314,23 @@ github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwX
github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag=
github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw=
github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgtype v1.14.4 h1:fKuNiCumbKTAIxQwXfB/nsrnkEI6bPJrrSiMKgbJ2j8=
github.com/jackc/pgtype v1.14.4/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA=
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
github.com/jackc/pgx/v4 v4.18.2 h1:xVpYkNR5pk5bMCZGfClbO962UIqVABcAGt7ha1s/FeU=
github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw=
github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA=
github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
@ -357,10 +354,10 @@ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:C
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE=
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@ -377,8 +374,9 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8=
github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
@ -457,8 +455,9 @@ github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKf
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0=
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
@ -556,14 +555,16 @@ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs=
github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe h1:aj/vX5epIlQQBEocKoM9nSAiNpakdQzElc8SaRFPu+I=
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
github.com/ua-parser/uap-go v0.0.0-20241012191800-bbb40edc15aa h1:VzPR4xFM7HARqNocjdHg75ZL9SAgFtaF3P57ZdDcG6I=
github.com/ua-parser/uap-go v0.0.0-20241012191800-bbb40edc15aa/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
@ -571,7 +572,6 @@ github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQ
github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
@ -638,8 +638,10 @@ golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWP
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
@ -652,8 +654,9 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -666,10 +669,14 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ=
golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
@ -680,8 +687,9 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -700,19 +708,26 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@ -720,8 +735,12 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -739,15 +758,15 @@ golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapK
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY=
google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@ -760,15 +779,15 @@ google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAU
google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM=
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4=
google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241216192217-9240e9c98484 h1:Z7FRVJPSMaHQxD0uXU8WdgFh8PseLM8Q8NzhnpMrBhQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241216192217-9240e9c98484/go.mod h1:lcTa1sDdWEIHMWlITnIczmw5w60CF9ffkb8Z+DVmmjA=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
google.golang.org/grpc v1.62.2 h1:iEIj1U5qjyBjzkM5nk3Fq+S1IbjbXSyqeULZ1Nfo4AA=
google.golang.org/grpc v1.62.2/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@ -781,8 +800,8 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.36.0 h1:mjIs9gYtt56AzC4ZaffQuh88TZurBGhIJMBZGSxNerQ=
google.golang.org/protobuf v1.36.0/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=

View file

@ -1,10 +1,10 @@
package models
package api
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http"
"openreplay/backend/pkg/analytics/api/models"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/user"
"strconv"
@ -13,22 +13,6 @@ import (
"github.com/go-playground/validator/v10"
)
// getCardId returns the ID from the request
func getCardId(r *http.Request) (int64, error) {
vars := mux.Vars(r)
idStr := vars["id"]
if idStr == "" {
return 0, fmt.Errorf("invalid Card ID")
}
id, err := strconv.ParseInt(idStr, 10, 64)
if err != nil {
return 0, fmt.Errorf("invalid Card ID")
}
return id, nil
}
func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
@ -40,7 +24,7 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) {
}
bodySize = len(bodyBytes)
req := &CardCreateRequest{}
req := &models.CardCreateRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
@ -53,30 +37,18 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) {
return
}
// TODO save card to DB
resp := &CardGetResponse{
Card: Card{
CardID: 1,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
DeletedAt: nil,
EditedAt: nil,
ProjectID: 1,
UserID: 1,
CardBase: CardBase{
Name: req.Name,
IsPublic: req.IsPublic,
Thumbnail: req.Thumbnail,
MetricType: req.MetricType,
MetricOf: req.MetricOf,
Series: req.Series,
},
},
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
currentUser := r.Context().Value("userData").(*user.User)
e.log.Info(r.Context(), "User ID: ", currentUser.ID)
resp, err := e.service.CreateCard(projectID, currentUser.ID, req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
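Note: the per-entity helpers getCardId and getDashboardId are removed in favour of a shared getIDFromRequest(r, name) used for both the projectId and id path variables. The helper's body is not part of this diff; the sketch below is only an assumption of what it looks like, generalizing the removed getCardId (mux.Vars lookup plus strconv parsing). The int return type is inferred from the int64(cardId) casts at the call sites.

// Hypothetical sketch only - the real getIDFromRequest is defined elsewhere in the package.
func getIDFromRequest(r *http.Request, name string) (int, error) {
	vars := mux.Vars(r)
	idStr := vars[name]
	if idStr == "" {
		return 0, fmt.Errorf("missing %s in request path", name)
	}
	id, err := strconv.Atoi(idStr)
	if err != nil || id <= 0 {
		return 0, fmt.Errorf("invalid %s: %q", name, idStr)
	}
	return id, nil
}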
@ -86,68 +58,128 @@ func (e *handlersImpl) getCard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
id, err := getCardId(r)
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
thumbnail := "https://example.com/image.png"
id, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
// TODO get card from DB
resp := &CardGetResponse{
Card: Card{
CardID: id,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
DeletedAt: nil,
EditedAt: nil,
ProjectID: 1,
UserID: 1,
CardBase: CardBase{
Name: "My Card",
IsPublic: true,
Thumbnail: &thumbnail,
MetricType: "timeseries",
MetricOf: "session_count",
},
},
resp, err := e.service.GetCardWithSeries(projectID, id)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
// getCards returns all cards for a project (the paginated variant follows below)
func (e *handlersImpl) getCards(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
// TODO get cards from DB
thumbnail := "https://example.com/image.png"
resp := &GetCardsResponse{
Cards: []Card{
{
CardID: 1,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
DeletedAt: nil,
EditedAt: nil,
ProjectID: 1,
UserID: 1,
CardBase: CardBase{
Name: "My Card",
IsPublic: true,
Thumbnail: &thumbnail,
MetricType: "timeseries",
MetricOf: "session_count",
},
},
},
Total: 10,
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
//currentUser := r.Context().Value("userData").(*user.User)
resp, err := e.service.GetCards(projectID)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
func (e *handlersImpl) getCardsPaginated(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
// Extract projectID from request
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
// Parse query parameters
query := r.URL.Query()
// Filters
filters := models.CardListFilter{
Filters: make(map[string]interface{}),
}
if name := query.Get("name"); name != "" {
filters.Filters["name"] = name
}
if metricType := query.Get("metric_type"); metricType != "" {
filters.Filters["metric_type"] = metricType
}
if dashboardIDs := query["dashboard_ids"]; len(dashboardIDs) > 0 {
// Parse dashboard_ids into []int
var ids []int
for _, id := range dashboardIDs {
if val, err := strconv.Atoi(id); err == nil {
ids = append(ids, val)
}
}
filters.Filters["dashboard_ids"] = ids
}
// Sorting
sort := models.CardListSort{
Field: query.Get("sort_field"),
Order: query.Get("sort_order"),
}
if sort.Field == "" {
sort.Field = "created_at" // Default sort field
}
if sort.Order == "" {
sort.Order = "desc" // Default sort order
}
// Pagination
limit := 10 // Default limit
page := 1 // Default page number
if val := query.Get("limit"); val != "" {
if l, err := strconv.Atoi(val); err == nil && l > 0 {
limit = l
}
}
if val := query.Get("page"); val != "" {
if p, err := strconv.Atoi(val); err == nil && p > 0 {
page = p
}
}
offset := (page - 1) * limit
// Validate inputs
if err := models.ValidateStruct(filters); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, fmt.Errorf("invalid filters: %w", err), startTime, r.URL.Path, bodySize)
return
}
if err := models.ValidateStruct(sort); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, fmt.Errorf("invalid sort: %w", err), startTime, r.URL.Path, bodySize)
return
}
// Call the service
resp, err := e.service.GetCardsPaginated(projectID, filters, sort, limit, offset)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
// Respond with JSON
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
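As a usage illustration, the defaulting rules above can be exercised in isolation. The snippet below is a standalone sketch, not the handler itself: the query-parameter names (name, metric_type, limit, page) and the limit=10 / page=1 defaults are taken from the handler, while the concrete query string is invented for the example; sort_field and sort_order fall back to created_at / desc in the same way.

// Standalone sketch mirroring getCardsPaginated's pagination defaults.
package main

import (
	"fmt"
	"net/url"
	"strconv"
)

func main() {
	// An example query string such a request might carry (values are made up).
	q, _ := url.ParseQuery("name=checkout&metric_type=timeseries&limit=25&page=3")

	limit, page := 10, 1 // defaults used by the handler
	if l, err := strconv.Atoi(q.Get("limit")); err == nil && l > 0 {
		limit = l
	}
	if p, err := strconv.Atoi(q.Get("page")); err == nil && p > 0 {
		page = p
	}
	offset := (page - 1) * limit

	fmt.Println(q.Get("name"), q.Get("metric_type")) // checkout timeseries
	fmt.Println(limit, page, offset)                 // 25 3 50
}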
@ -155,7 +187,13 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
id, err := getCardId(r)
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
cardId, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
@ -168,7 +206,7 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) {
}
bodySize = len(bodyBytes)
req := &CardUpdateRequest{}
req := &models.CardUpdateRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
@ -181,25 +219,11 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) {
return
}
// TODO update card in DB
resp := &CardGetResponse{
Card: Card{
CardID: id,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
DeletedAt: nil,
EditedAt: nil,
ProjectID: 1,
UserID: 1,
CardBase: CardBase{
Name: req.Name,
IsPublic: req.IsPublic,
Thumbnail: req.Thumbnail,
MetricType: req.MetricType,
MetricOf: req.MetricOf,
},
},
currentUser := r.Context().Value("userData").(*user.User)
resp, err := e.service.UpdateCard(projectID, int64(cardId), currentUser.ID, req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
@ -209,13 +233,24 @@ func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
_, err := getCardId(r)
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
// TODO delete card from DB
cardId, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
currentUser := r.Context().Value("userData").(*user.User)
err = e.service.DeleteCard(projectID, int64(cardId), currentUser.ID)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, nil, startTime, r.URL.Path, bodySize)
}
@ -224,6 +259,12 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request)
startTime := time.Now()
bodySize := 0
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
@ -231,7 +272,7 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request)
}
bodySize = len(bodyBytes)
req := &GetCardChartDataRequest{}
req := &models.GetCardChartDataRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
@ -239,26 +280,13 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request)
validate := validator.New()
err = validate.Struct(req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
// TODO get card chart data from ClickHouse
jsonInput := `
{
"data": [
{
"timestamp": 1733934939000,
"Series A": 100,
"Series B": 200
},
{
"timestamp": 1733935939000,
"Series A": 150,
"Series B": 250
}
]
}`
var resp GetCardChartDataResponse
err = json.Unmarshal([]byte(jsonInput), &resp)
currentUser := r.Context().Value("userData").(*user.User)
resp, err := e.service.GetCardChartData(projectID, currentUser.ID, req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return

View file

@ -1,4 +1,4 @@
package models
package api
import (
"time"

View file

@ -1,31 +1,15 @@
package models
package api
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"github.com/go-playground/validator/v10"
"net/http"
"openreplay/backend/pkg/analytics/api/models"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/user"
"strconv"
"time"
)
func getDashboardId(r *http.Request) (int, error) {
vars := mux.Vars(r)
idStr := vars["id"]
if idStr == "" {
return 0, fmt.Errorf("invalid dashboard ID")
}
id, err := strconv.Atoi(idStr)
if err != nil {
return 0, fmt.Errorf("invalid dashboard ID")
}
return id, nil
}
func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
@ -37,24 +21,27 @@ func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) {
}
bodySize = len(bodyBytes)
req := &CreateDashboardRequest{}
req := &models.CreateDashboardRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
resp := &GetDashboardResponse{
Dashboard: Dashboard{
DashboardID: 1,
Name: req.Name,
Description: req.Description,
IsPublic: req.IsPublic,
IsPinned: req.IsPinned,
},
validate := validator.New()
err = validate.Struct(req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
currentUser := r.Context().Value("userData").(*user.User)
e.log.Info(r.Context(), "User ID: ", currentUser.ID)
resp, err := e.service.CreateDashboard(projectID, currentUser.ID, req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
@ -64,23 +51,17 @@ func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
resp := &GetDashboardsResponse{
Dashboards: []Dashboard{
{
DashboardID: 1,
Name: "Dashboard",
Description: "Description",
IsPublic: true,
IsPinned: false,
},
},
Total: 1,
u := r.Context().Value("userData").(*user.User)
resp, err := e.service.GetDashboards(projectID, u.ID)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
@ -90,34 +71,50 @@ func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
id, err := getDashboardId(r)
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
resp := &GetDashboardResponse{
Dashboard: Dashboard{
DashboardID: id,
Name: "Dashboard",
Description: "Description",
IsPublic: true,
IsPinned: false,
},
dashboardID, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
u := r.Context().Value("userData").(*user.User)
res, err := e.service.GetDashboard(projectID, dashboardID, u.ID)
if err != nil {
// Map errors to appropriate HTTP status codes
if err.Error() == "not_found: dashboard not found" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize)
} else if err.Error() == "access_denied: user does not have access" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, err, startTime, r.URL.Path, bodySize)
} else {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
}
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, res, startTime, r.URL.Path, bodySize)
}
func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
dashboardID, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
if err != nil {
@ -126,21 +123,28 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) {
}
bodySize = len(bodyBytes)
req := &UpdateDashboardRequest{}
u := r.Context().Value("userData").(*user.User)
_, err = e.service.GetDashboard(projectID, dashboardID, u.ID)
if err != nil {
// Map errors to appropriate HTTP status codes
if err.Error() == "not_found: dashboard not found" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize)
} else if err.Error() == "access_denied: user does not have access" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, err, startTime, r.URL.Path, bodySize)
} else {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
}
return
}
req := &models.UpdateDashboardRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
resp := &GetDashboardResponse{
Dashboard: Dashboard{
DashboardID: 1,
Name: req.Name,
Description: req.Description,
IsPublic: req.IsPublic,
IsPinned: req.IsPinned,
},
}
resp, err := e.service.UpdateDashboard(projectID, dashboardID, u.ID, req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
@ -149,12 +153,37 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
e.log.Info(r.Context(), "Dashboard deleted")
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
dashboardID, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
u := r.Context().Value("userData").(*user.User)
_, err = e.service.GetDashboard(projectID, dashboardID, u.ID)
if err != nil {
// Map errors to appropriate HTTP status codes
if err.Error() == "not_found: dashboard not found" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize)
} else if err.Error() == "access_denied: user does not have access" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, err, startTime, r.URL.Path, bodySize)
} else {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
}
return
}
err = e.service.DeleteDashboard(projectID, dashboardID, u.ID)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseOK(e.log, r.Context(), w, startTime, r.URL.Path, bodySize)
}
@ -163,12 +192,6 @@ func (e *handlersImpl) pinDashboard(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
e.log.Info(r.Context(), "Dashboard pinned")
e.responser.ResponseOK(e.log, r.Context(), w, startTime, r.URL.Path, bodySize)
@ -179,13 +202,52 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
e.log.Info(r.Context(), "Card added to dashboard")
dashboardID, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
u := r.Context().Value("userData").(*user.User)
bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
return
}
bodySize = len(bodyBytes)
req := &models.AddCardToDashboardRequest{}
if err := json.Unmarshal(bodyBytes, req); err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
validate := validator.New()
err = validate.Struct(req)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
err = e.service.AddCardsToDashboard(projectID, dashboardID, u.ID, req)
if err != nil {
if err.Error() == "not_found: dashboard not found" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize)
} else if err.Error() == "access_denied: user does not have access" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, err, startTime, r.URL.Path, bodySize)
} else {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
}
return
}
e.responser.ResponseOK(e.log, r.Context(), w, startTime, r.URL.Path, bodySize)
}
@ -195,11 +257,41 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re
startTime := time.Now()
bodySize := 0
//id, err := getDashboardId(r)
//if err != nil {
// e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
// return
//}
projectID, err := getIDFromRequest(r, "projectId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
dashboardID, err := getIDFromRequest(r, "id")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
cardID, err := getIDFromRequest(r, "cardId")
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
return
}
u := r.Context().Value("userData").(*user.User)
_, err = e.service.GetDashboard(projectID, dashboardID, u.ID)
if err != nil {
if err.Error() == "not_found: dashboard not found" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize)
} else if err.Error() == "access_denied: user does not have access" {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, err, startTime, r.URL.Path, bodySize)
} else {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
}
return
}
err = e.service.DeleteCardFromDashboard(dashboardID, cardID)
if err != nil {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize)
return
}
e.responser.ResponseOK(e.log, r.Context(), w, startTime, r.URL.Path, bodySize)
}

View file

@ -1,20 +1,22 @@
package models
package api
import (
"fmt"
"net/http"
"strconv"
"github.com/gorilla/mux"
config "openreplay/backend/internal/config/analytics"
"openreplay/backend/pkg/analytics/service"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/keys"
)
type handlersImpl struct {
log logger.Logger
responser *api.Responser
objStorage objectstorage.ObjectStorage
jsonSizeLimit int64
keys keys.Keys
service service.Service
}
@ -25,8 +27,10 @@ func (e *handlersImpl) GetAll() []*api.Description {
{"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"},
{"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"},
{"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"},
{"/v1/analytics/{projectId}/dashboards/{id}/cards", e.addCardToDashboard, "POST"},
{"/v1/analytics/{projectId}/dashboards/{id}/cards/{cardId}", e.removeCardFromDashboard, "DELETE"},
{"/v1/analytics/{projectId}/cards", e.createCard, "POST"},
{"/v1/analytics/{projectId}/cards", e.getCards, "GET"},
{"/v1/analytics/{projectId}/cards", e.getCardsPaginated, "GET"},
{"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"},
{"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"},
{"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"},
@ -35,13 +39,26 @@ func (e *handlersImpl) GetAll() []*api.Description {
}
}
func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, objStore objectstorage.ObjectStorage, keys keys.Keys, service service.Service) (api.Handlers, error) {
func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, service service.Service) (api.Handlers, error) {
return &handlersImpl{
log: log,
responser: responser,
objStorage: objStore,
jsonSizeLimit: cfg.JsonSizeLimit,
keys: keys,
service: service,
}, nil
}
func getIDFromRequest(r *http.Request, key string) (int, error) {
vars := mux.Vars(r)
idStr := vars[key]
if idStr == "" {
return 0, fmt.Errorf("missing %s in request", key)
}
id, err := strconv.Atoi(idStr)
if err != nil {
return 0, fmt.Errorf("invalid %s format", key)
}
return id, nil
}

View file

@ -0,0 +1,212 @@
package models
import (
"github.com/go-playground/validator/v10"
"strings"
"time"
)
// CardBase Common fields for the Card entity
type CardBase struct {
Name string `json:"name" validate:"required"`
IsPublic bool `json:"isPublic" validate:"omitempty"`
DefaultConfig map[string]any `json:"defaultConfig"`
Config map[string]any `json:"config"`
Thumbnail *string `json:"thumbnail" validate:"omitempty,url"`
MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"`
MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"`
MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"`
ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"`
MetricValue []string `json:"metricValue" validate:"omitempty"`
SessionID *int64 `json:"sessionId" validate:"omitempty"`
Series []CardSeriesBase `json:"series" validate:"required,dive"`
}
// Card Fields specific to database operations
type Card struct {
CardBase
ProjectID int64 `json:"projectId" validate:"required"`
UserID int64 `json:"userId" validate:"required"`
CardID int64 `json:"cardId"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt *time.Time `json:"deleted_at,omitempty"`
EditedAt *time.Time `json:"edited_at,omitempty"`
}
type CardSeriesBase struct {
Name string `json:"name" validate:"required"`
CreatedAt time.Time `json:"createdAt" validate:"omitempty"`
DeletedAt *time.Time `json:"deletedAt" validate:"omitempty"`
Index int64 `json:"index" validate:"required"`
Filter SeriesFilter `json:"filter"`
}
type CardSeries struct {
SeriesID int64 `json:"seriesId" validate:"omitempty"`
MetricID int64 `json:"metricId" validate:"omitempty"`
CardSeriesBase
}
type SeriesFilter struct {
EventOrder string `json:"eventOrder" validate:"required,oneof=then or and"`
Filters []FilterItem `json:"filters"`
}
type FilterItem struct {
Type string `json:"type" validate:"required"`
Operator string `json:"operator" validate:"required"`
Source string `json:"source" validate:"required"`
SourceOperator string `json:"sourceOperator" validate:"required"`
Value []string `json:"value" validate:"required,dive,required"`
IsEvent bool `json:"isEvent"`
}
// CardCreateRequest Fields required for creating a card (from the frontend)
type CardCreateRequest struct {
CardBase
}
type CardGetResponse struct {
Card
Series []CardSeries `json:"series"`
}
type CardUpdateRequest struct {
CardBase
}
type GetCardsResponse struct {
Cards []Card `json:"cards"`
}
type GetCardsResponsePaginated struct {
Cards []Card `json:"cards"`
Total int `json:"total"`
}
type DataPoint struct {
Timestamp int64 `json:"timestamp"`
Series map[string]int64 `json:"series"`
}
type GetCardChartDataRequest struct {
MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"`
MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"`
ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"`
MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"`
SessionID int64 `json:"sessionId"`
Series []CardSeries `json:"series" validate:"required,dive"`
}
type GetCardChartDataResponse struct {
Data []DataPoint `json:"data"`
}
/************************************************************
* CardListFilter and Sorter
*/
// Supported filters, fields, and orders
var (
SupportedFilterKeys = map[string]bool{
"name": true,
"metric_type": true,
"dashboard_ids": true,
}
SupportedSortFields = map[string]string{
"name": "m.name",
"created_at": "m.created_at",
"metric_type": "m.metric_type",
}
SupportedSortOrders = map[string]bool{
"asc": true,
"desc": true,
}
)
// CardListFilter holds filtering criteria for listing cards.
type CardListFilter struct {
// Keys: "name" (string), "metric_type" (string), "dashboard_ids" ([]int)
Filters map[string]interface{} `validate:"supportedFilters"`
}
// CardListSort holds sorting criteria.
type CardListSort struct {
Field string `validate:"required,supportedSortField"`
Order string `validate:"required,supportedSortOrder"`
}
// Validator singleton
var validate *validator.Validate
func GetValidator() *validator.Validate {
if validate == nil {
validate = validator.New()
// Register custom validations
_ = validate.RegisterValidation("supportedFilters", supportedFiltersValidator)
_ = validate.RegisterValidation("supportedSortField", supportedSortFieldValidator)
_ = validate.RegisterValidation("supportedSortOrder", supportedSortOrderValidator)
}
return validate
}
func ValidateStruct(obj interface{}) error {
return GetValidator().Struct(obj)
}
// Custom validations
func supportedFiltersValidator(fl validator.FieldLevel) bool {
filters, ok := fl.Field().Interface().(map[string]interface{})
if !ok {
return false
}
for k := range filters {
if !SupportedFilterKeys[k] {
return false
}
}
return true
}
func supportedSortFieldValidator(fl validator.FieldLevel) bool {
field := strings.ToLower(fl.Field().String())
_, ok := SupportedSortFields[field]
return ok
}
func supportedSortOrderValidator(fl validator.FieldLevel) bool {
order := strings.ToLower(fl.Field().String())
return SupportedSortOrders[order]
}
// Filter helpers
func (f *CardListFilter) GetNameFilter() *string {
if val, ok := f.Filters["name"].(string); ok && val != "" {
return &val
}
return nil
}
func (f *CardListFilter) GetMetricTypeFilter() *string {
if val, ok := f.Filters["metric_type"].(string); ok && val != "" {
return &val
}
return nil
}
func (f *CardListFilter) GetDashboardIDs() []int {
if val, ok := f.Filters["dashboard_ids"].([]int); ok && len(val) > 0 {
return val
}
return nil
}
// Sort helpers
func (s *CardListSort) GetSQLField() string {
return SupportedSortFields[strings.ToLower(s.Field)]
}
func (s *CardListSort) GetSQLOrder() string {
return strings.ToUpper(s.Order)
}
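// Illustrative usage sketch (not part of the shipped code): shows how a caller could
// assemble the filter/sort pair defined above and run it through the registered custom
// validators before handing it to GetCardsPaginated. The helper name buildCardListing
// and its arguments are hypothetical.
func buildCardListing(name string, dashboardIDs []int) (CardListFilter, CardListSort, error) {
filter := CardListFilter{Filters: map[string]interface{}{
"name": name, // matched with ILIKE "%name%" in the service layer
"dashboard_ids": dashboardIDs, // resolved through dashboard_widgets
}}
sort := CardListSort{Field: "created_at", Order: "desc"}
if err := ValidateStruct(filter); err != nil {
return CardListFilter{}, CardListSort{}, err
}
if err := ValidateStruct(sort); err != nil {
return CardListFilter{}, CardListSort{}, err
}
return filter, sort, nil
}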

View file

@ -1,11 +1,16 @@
package models
type Dashboard struct {
DashboardID int `json:"dashboard_id"`
Name string `json:"name"`
Description string `json:"description"`
IsPublic bool `json:"is_public"`
IsPinned bool `json:"is_pinned"`
DashboardID int `json:"dashboardId"`
ProjectID int `json:"projectId"`
UserID int `json:"userId"`
Name string `json:"name"`
Description string `json:"description"`
IsPublic bool `json:"isPublic"`
IsPinned bool `json:"isPinned"`
OwnerEmail string `json:"ownerEmail"`
OwnerName string `json:"ownerName"`
Metrics []CardBase `json:"cards"`
}
type CreateDashboardResponse struct {
@ -16,16 +21,20 @@ type GetDashboardResponse struct {
Dashboard
}
type GetDashboardsResponse struct {
type GetDashboardsResponsePaginated struct {
Dashboards []Dashboard `json:"dashboards"`
Total uint64 `json:"total"`
}
type GetDashboardsResponse struct {
Dashboards []Dashboard `json:"dashboards"`
}
// REQUESTS
type CreateDashboardRequest struct {
Name string `json:"name"`
Description string `json:"description"`
Name string `json:"name" validate:"required,min=3,max=150"`
Description string `json:"description" validate:"max=500"`
IsPublic bool `json:"is_public"`
IsPinned bool `json:"is_pinned"`
Metrics []int `json:"metrics"`
@ -34,9 +43,10 @@ type CreateDashboardRequest struct {
type GetDashboardsRequest struct {
Page uint64 `json:"page"`
Limit uint64 `json:"limit"`
IsPublic bool `json:"is_public"`
Order string `json:"order"`
Query string `json:"query"`
FilterBy string `json:"filterBy"`
OrderBy string `json:"orderBy"`
}
type UpdateDashboardRequest struct {
@ -52,9 +62,6 @@ type PinDashboardRequest struct {
}
type AddCardToDashboardRequest struct {
CardIDs []int `json:"card_ids"`
}
type DeleteCardFromDashboardRequest struct {
CardIDs []int `json:"card_ids"`
MetricIDs []int `json:"metric_ids" validate:"required,min=1,dive,gt=0"`
Config map[string]interface{} `json:"config"` // Optional
}

View file

@ -1,8 +1,6 @@
package analytics
import (
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server/tracer"
"time"
"openreplay/backend/internal/config/analytics"
@ -10,11 +8,11 @@ import (
"openreplay/backend/pkg/analytics/service"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage/store"
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/auth"
"openreplay/backend/pkg/server/keys"
"openreplay/backend/pkg/server/limiter"
"openreplay/backend/pkg/server/tracer"
)
type ServicesBuilder struct {
@ -25,12 +23,6 @@ type ServicesBuilder struct {
}
func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, pgconn pool.Pool) (*ServicesBuilder, error) {
objStore, err := store.NewStore(&cfg.ObjectsConfig)
if err != nil {
return nil, err
}
newKeys := keys.NewKeys(log, pgconn)
responser := api.NewResponser(webMetrics)
audiTrail, err := tracer.NewTracer(log, pgconn)
@ -38,18 +30,18 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
return nil, err
}
analyticsService, err := service.NewService(log, pgconn, objStore)
analyticsService, err := service.NewService(log, pgconn)
if err != nil {
return nil, err
}
handlers, err := analyticsAPI.NewHandlers(log, cfg, responser, objStore, keys.NewKeys(log, pgconn), analyticsService)
handlers, err := analyticsAPI.NewHandlers(log, cfg, responser, analyticsService)
if err != nil {
return nil, err
}
return &ServicesBuilder{
Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, newKeys),
Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil),
RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
AuditTrail: audiTrail,
AnalyticsAPI: handlers,

View file

@ -1,34 +1,50 @@
package service
import (
"context"
"errors"
"openreplay/backend/pkg/analytics/api/models"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage"
)
type Service interface {
GetDashboard(projectId int, dashboardId int, userId uint64) (*models.GetDashboardResponse, error)
GetDashboardsPaginated(projectId int, userId uint64, req *models.GetDashboardsRequest) (*models.GetDashboardsResponsePaginated, error)
GetDashboards(projectId int, userId uint64) (*models.GetDashboardsResponse, error)
CreateDashboard(projectId int, userId uint64, req *models.CreateDashboardRequest) (*models.GetDashboardResponse, error)
UpdateDashboard(projectId int, dashboardId int, userId uint64, req *models.UpdateDashboardRequest) (*models.GetDashboardResponse, error)
DeleteDashboard(projectId int, dashboardId int, userId uint64) error
AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *models.AddCardToDashboardRequest) error
DeleteCardFromDashboard(dashboardId int, cardId int) error
GetCard(projectId int, cardId int) (*models.CardGetResponse, error)
GetCardWithSeries(projectId int, cardId int) (*models.CardGetResponse, error)
GetCards(projectId int) (*models.GetCardsResponse, error)
GetCardsPaginated(projectId int, filters models.CardListFilter, sort models.CardListSort, limit int, offset int) (*models.GetCardsResponsePaginated, error)
CreateCard(projectId int, userId uint64, req *models.CardCreateRequest) (*models.CardGetResponse, error)
UpdateCard(projectId int, cardId int64, userId uint64, req *models.CardUpdateRequest) (*models.CardGetResponse, error)
DeleteCard(projectId int, cardId int64, userId uint64) error
GetCardChartData(projectId int, userId uint64, req *models.GetCardChartDataRequest) ([]models.DataPoint, error)
}
type serviceImpl struct {
log logger.Logger
conn pool.Pool
storage objectstorage.ObjectStorage
log logger.Logger
pgconn pool.Pool
ctx context.Context
}
func NewService(log logger.Logger, conn pool.Pool, storage objectstorage.ObjectStorage) (Service, error) {
func NewService(log logger.Logger, conn pool.Pool) (Service, error) {
switch {
case log == nil:
return nil, errors.New("logger is empty")
case conn == nil:
return nil, errors.New("connection pool is empty")
case storage == nil:
return nil, errors.New("object storage is empty")
}
return &serviceImpl{
log: log,
conn: conn,
storage: storage,
log: log,
pgconn: conn,
ctx: context.Background(),
}, nil
}

View file

@ -0,0 +1,428 @@
package service
import (
"context"
"encoding/json"
"fmt"
"github.com/jackc/pgx/v4"
"github.com/lib/pq"
"openreplay/backend/pkg/analytics/api/models"
"strings"
)
func (s *serviceImpl) CreateCard(projectId int, userID uint64, req *models.CardCreateRequest) (*models.CardGetResponse, error) {
if req.MetricValue == nil {
req.MetricValue = []string{}
}
tx, err := s.pgconn.Begin() // Start transaction
if err != nil {
return nil, fmt.Errorf("failed to start transaction: %w", err)
}
ctx := context.Background()
defer func() {
// Rollback is a no-op after a successful Commit, so it is safe to call unconditionally
// and cleans up whenever any step below fails.
_ = tx.Rollback(ctx)
}()
// Insert the card
sql := `
INSERT INTO public.metrics (project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at`
card := &models.CardGetResponse{}
err = tx.QueryRow(
ctx, sql,
projectId, userID, req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic,
).Scan(
&card.CardID,
&card.ProjectID,
&card.UserID,
&card.Name,
&card.MetricType,
&card.ViewType,
&card.MetricOf,
&card.MetricValue,
&card.MetricFormat,
&card.IsPublic,
&card.CreatedAt,
&card.EditedAt,
)
if err != nil {
return nil, fmt.Errorf("failed to create card: %w", err)
}
// Create series for the card
seriesList := s.CreateSeries(ctx, tx, card.CardID, req.Series)
if len(seriesList) != len(req.Series) {
return nil, fmt.Errorf("not all series were created successfully")
}
card.Series = seriesList
if err = tx.Commit(ctx); err != nil {
return nil, fmt.Errorf("failed to commit transaction: %w", err)
}
return card, nil
}
func (s *serviceImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int64, series []models.CardSeriesBase) []models.CardSeries {
if len(series) == 0 {
return nil // No series to create
}
// Batch insert for better performance
sql := `
INSERT INTO public.metric_series (metric_id, name, index, filter) VALUES %s
RETURNING series_id, metric_id, name, index, filter`
// Generate the VALUES placeholders dynamically
var values []string
var args []interface{}
for i, ser := range series {
values = append(values, fmt.Sprintf("($%d, $%d, $%d, $%d)", i*4+1, i*4+2, i*4+3, i*4+4))
filterJSON, err := json.Marshal(ser.Filter) // Convert struct to JSON
if err != nil {
s.log.Error(ctx, "failed to serialize filter to JSON: %v", err)
return nil
}
args = append(args, metricId, ser.Name, i, string(filterJSON))
}
query := fmt.Sprintf(sql, strings.Join(values, ","))
s.log.Info(ctx, "Executing query: %s with args: %v", query, args)
rows, err := tx.Query(ctx, query, args...)
if err != nil {
s.log.Error(ctx, "failed to execute batch insert for series: %v", err)
return nil
}
defer rows.Close()
if rows.Err() != nil {
s.log.Error(ctx, "Query returned an error: %v", rows.Err())
return nil
}
// Collect inserted series
var seriesList []models.CardSeries
for rows.Next() {
cardSeries := models.CardSeries{}
if err := rows.Scan(&cardSeries.SeriesID, &cardSeries.MetricID, &cardSeries.Name, &cardSeries.Index, &cardSeries.Filter); err != nil {
s.log.Error(ctx, "failed to scan series: %v", err)
continue
}
seriesList = append(seriesList, cardSeries)
}
return seriesList
}
func (s *serviceImpl) GetCard(projectId int, cardID int) (*models.CardGetResponse, error) {
sql :=
`SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at
FROM public.metrics
WHERE metric_id = $1 AND project_id = $2 AND deleted_at IS NULL`
card := &models.CardGetResponse{}
err := s.pgconn.QueryRow(sql, cardID, projectId).Scan(
&card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf, &card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt,
)
if err != nil {
return nil, fmt.Errorf("failed to get card: %w", err)
}
return card, nil
}
func (s *serviceImpl) GetCardWithSeries(projectId int, cardID int) (*models.CardGetResponse, error) {
sql := `
SELECT m.metric_id, m.project_id, m.user_id, m.name, m.metric_type, m.view_type, m.metric_of,
m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at,
COALESCE(
json_agg(
json_build_object(
'seriesId', ms.series_id,
'index', ms.index,
'name', ms.name,
'filter', ms.filter
)
) FILTER (WHERE ms.series_id IS NOT NULL), '[]'
) AS series
FROM public.metrics m
LEFT JOIN public.metric_series ms ON m.metric_id = ms.metric_id
WHERE m.metric_id = $1 AND m.project_id = $2 AND m.deleted_at IS NULL
GROUP BY m.metric_id, m.project_id, m.user_id, m.name, m.metric_type, m.view_type,
m.metric_of, m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at
`
card := &models.CardGetResponse{}
var seriesJSON []byte
err := s.pgconn.QueryRow(sql, cardID, projectId).Scan(
&card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf,
&card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt, &seriesJSON,
)
if err != nil {
return nil, fmt.Errorf("failed to get card: %w", err)
}
if err := json.Unmarshal(seriesJSON, &card.Series); err != nil {
return nil, fmt.Errorf("failed to unmarshal series: %w", err)
}
return card, nil
}
func (s *serviceImpl) GetCards(projectId int) (*models.GetCardsResponse, error) {
sql := `
SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at
FROM public.metrics
WHERE project_id = $1 AND deleted_at IS NULL`
rows, err := s.pgconn.Query(sql, projectId)
if err != nil {
return nil, fmt.Errorf("failed to get cards: %w", err)
}
defer rows.Close()
cards := make([]models.Card, 0)
for rows.Next() {
card := models.Card{}
if err := rows.Scan(
&card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf,
&card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt,
); err != nil {
return nil, fmt.Errorf("failed to scan card: %w", err)
}
cards = append(cards, card)
}
return &models.GetCardsResponse{Cards: cards}, nil
}
func (s *serviceImpl) GetCardsPaginated(
projectId int,
filters models.CardListFilter,
sort models.CardListSort,
limit,
offset int,
) (*models.GetCardsResponsePaginated, error) {
// Validate inputs
if err := models.ValidateStruct(filters); err != nil {
return nil, fmt.Errorf("invalid filters: %w", err)
}
if err := models.ValidateStruct(sort); err != nil {
return nil, fmt.Errorf("invalid sort: %w", err)
}
var (
conditions []string
params []interface{}
paramIndex = 1
)
// Project ID is mandatory
conditions = append(conditions, fmt.Sprintf("m.project_id = $%d", paramIndex))
params = append(params, projectId)
paramIndex++
// Apply filters
if nameFilter := filters.GetNameFilter(); nameFilter != nil {
conditions = append(conditions, fmt.Sprintf("m.name ILIKE $%d", paramIndex))
params = append(params, "%"+*nameFilter+"%")
paramIndex++
}
if typeFilter := filters.GetMetricTypeFilter(); typeFilter != nil {
conditions = append(conditions, fmt.Sprintf("m.metric_type = $%d", paramIndex))
params = append(params, *typeFilter)
paramIndex++
}
var joinClause string
if dashboardIDs := filters.GetDashboardIDs(); len(dashboardIDs) > 0 {
joinClause = "LEFT JOIN public.dashboard_widgets dw ON m.metric_id = dw.metric_id"
conditions = append(conditions, fmt.Sprintf("dw.dashboard_id = ANY($%d)", paramIndex))
params = append(params, pq.Array(dashboardIDs))
paramIndex++
}
// Exclude deleted
conditions = append(conditions, "m.deleted_at IS NULL")
whereClause := "WHERE " + strings.Join(conditions, " AND ")
orderClause := fmt.Sprintf("ORDER BY %s %s", sort.GetSQLField(), sort.GetSQLOrder())
limitClause := fmt.Sprintf("LIMIT $%d", paramIndex)
params = append(params, limit)
paramIndex++
offsetClause := fmt.Sprintf("OFFSET $%d", paramIndex)
params = append(params, offset)
paramIndex++
// Main query
query := fmt.Sprintf(`
SELECT m.metric_id, m.project_id, m.user_id, m.name, m.metric_type, m.view_type, m.metric_of,
m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at
FROM public.metrics m
%s
%s
%s
%s
%s
`, joinClause, whereClause, orderClause, limitClause, offsetClause)
rows, err := s.pgconn.Query(query, params...)
if err != nil {
return nil, fmt.Errorf("failed to get cards: %w", err)
}
defer rows.Close()
var cards []models.Card
for rows.Next() {
var card models.Card
if err := rows.Scan(
&card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf,
&card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt,
); err != nil {
return nil, fmt.Errorf("failed to scan card: %w", err)
}
cards = append(cards, card)
}
// Count total (exclude limit, offset, order)
countParams := params[0 : len(params)-2] // all filter params without limit/offset
countQuery := fmt.Sprintf(`
SELECT COUNT(*)
FROM public.metrics m
%s
%s
`, joinClause, whereClause)
var total int
if err := s.pgconn.QueryRow(countQuery, countParams...).Scan(&total); err != nil {
return nil, fmt.Errorf("failed to get total count: %w", err)
}
return &models.GetCardsResponsePaginated{
Cards: cards,
Total: total,
}, nil
}
func (s *serviceImpl) UpdateCard(projectId int, cardID int64, userID uint64, req *models.CardUpdateRequest) (*models.CardGetResponse, error) {
if req.MetricValue == nil {
req.MetricValue = []string{}
}
tx, err := s.pgconn.Begin() // Start transaction
if err != nil {
return nil, fmt.Errorf("failed to start transaction: %w", err)
}
ctx := context.Background()
defer func() {
// Rollback is a no-op after a successful Commit, so it is safe to call unconditionally
// and cleans up whenever any step below fails.
_ = tx.Rollback(ctx)
}()
// Update the card
sql := `
UPDATE public.metrics
SET name = $1, metric_type = $2, view_type = $3, metric_of = $4, metric_value = $5, metric_format = $6, is_public = $7
WHERE metric_id = $8 AND project_id = $9 AND deleted_at IS NULL
RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at`
card := &models.CardGetResponse{}
err = tx.QueryRow(ctx, sql,
req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic, cardID, projectId,
).Scan(
&card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf,
&card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt,
)
if err != nil {
return nil, fmt.Errorf("failed to update card: %w", err)
}
// delete all series for the card and create new ones
err = s.DeleteCardSeries(card.CardID)
if err != nil {
return nil, fmt.Errorf("failed to delete series: %w", err)
}
seriesList := s.CreateSeries(ctx, tx, card.CardID, req.Series)
if len(seriesList) != len(req.Series) {
return nil, fmt.Errorf("not all series were created successfully")
}
card.Series = seriesList
if err = tx.Commit(ctx); err != nil {
return nil, fmt.Errorf("failed to commit transaction: %w", err)
}
return card, nil
}
func (s *serviceImpl) DeleteCardSeries(cardId int64) error {
sql := `DELETE FROM public.metric_series WHERE metric_id = $1`
err := s.pgconn.Exec(sql, cardId)
if err != nil {
return fmt.Errorf("failed to delete series: %w", err)
}
return nil
}
func (s *serviceImpl) DeleteCard(projectId int, cardID int64, userID uint64) error {
sql := `
UPDATE public.metrics
SET deleted_at = now()
WHERE metric_id = $1 AND project_id = $2 AND user_id = $3 AND deleted_at IS NULL`
err := s.pgconn.Exec(sql, cardID, projectId, userID)
if err != nil {
return fmt.Errorf("failed to delete card: %w", err)
}
return nil
}
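// Note: GetCardChartData below currently unmarshals a hardcoded JSON sample and returns it
// as-is; the actual chart aggregation query is not implemented in this revision.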
func (s *serviceImpl) GetCardChartData(projectId int, userID uint64, req *models.GetCardChartDataRequest) ([]models.DataPoint, error) {
jsonInput := `
{
"data": [
{
"timestamp": 1733934939000,
"Series A": 100,
"Series B": 200
},
{
"timestamp": 1733935939000,
"Series A": 150,
"Series B": 250
}
]
}`
var resp models.GetCardChartDataResponse
if err := json.Unmarshal([]byte(jsonInput), &resp); err != nil {
return nil, fmt.Errorf("failed to unmarshal response: %w", err)
}
return resp.Data, nil
}

View file

@ -0,0 +1,381 @@
package service
import (
"context"
"encoding/json"
"errors"
"fmt"
"openreplay/backend/pkg/analytics/api/models"
)
// CreateDashboard Create a new dashboard
func (s *serviceImpl) CreateDashboard(projectId int, userID uint64, req *models.CreateDashboardRequest) (*models.GetDashboardResponse, error) {
sql := `
INSERT INTO dashboards (project_id, user_id, name, description, is_public, is_pinned)
VALUES ($1, $2, $3, $4, $5, $6)
RETURNING dashboard_id, project_id, user_id, name, description, is_public, is_pinned`
dashboard := &models.GetDashboardResponse{}
err := s.pgconn.QueryRow(sql, projectId, userID, req.Name, req.Description, req.IsPublic, req.IsPinned).Scan(
&dashboard.DashboardID,
&dashboard.ProjectID,
&dashboard.UserID,
&dashboard.Name,
&dashboard.Description,
&dashboard.IsPublic,
&dashboard.IsPinned,
)
if err != nil {
return nil, fmt.Errorf("failed to create dashboard: %w", err)
}
return dashboard, nil
}
// GetDashboard Fetch a specific dashboard by ID
func (s *serviceImpl) GetDashboard(projectId int, dashboardID int, userID uint64) (*models.GetDashboardResponse, error) {
sql := `
WITH series_agg AS (
SELECT
ms.metric_id,
json_agg(
json_build_object(
'index', ms.index,
'name', ms.name,
'filter', ms.filter
)
) AS series
FROM metric_series ms
GROUP BY ms.metric_id
)
SELECT
d.dashboard_id,
d.project_id,
d.name,
d.description,
d.is_public,
d.is_pinned,
d.user_id,
COALESCE(json_agg(
json_build_object(
'config', dw.config,
'metric_id', m.metric_id,
'name', m.name,
'metric_type', m.metric_type,
'view_type', m.view_type,
'metric_of', m.metric_of,
'metric_value', m.metric_value,
'metric_format', m.metric_format,
'series', s.series
)
) FILTER (WHERE m.metric_id IS NOT NULL), '[]') AS metrics
FROM dashboards d
LEFT JOIN dashboard_widgets dw ON d.dashboard_id = dw.dashboard_id
LEFT JOIN metrics m ON dw.metric_id = m.metric_id
LEFT JOIN series_agg s ON m.metric_id = s.metric_id
WHERE d.dashboard_id = $1 AND d.project_id = $2 AND d.deleted_at IS NULL
GROUP BY d.dashboard_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned, d.user_id`
dashboard := &models.GetDashboardResponse{}
var ownerID int
var metricsJSON []byte
err := s.pgconn.QueryRow(sql, dashboardID, projectId).Scan(
&dashboard.DashboardID,
&dashboard.ProjectID,
&dashboard.Name,
&dashboard.Description,
&dashboard.IsPublic,
&dashboard.IsPinned,
&ownerID,
&metricsJSON,
)
if err != nil {
if err.Error() == "no rows in result set" {
return nil, errors.New("not_found: dashboard not found")
}
return nil, fmt.Errorf("error fetching dashboard: %w", err)
}
if err := json.Unmarshal(metricsJSON, &dashboard.Metrics); err != nil {
return nil, fmt.Errorf("error unmarshalling metrics: %w", err)
}
if !dashboard.IsPublic && uint64(ownerID) != userID {
return nil, fmt.Errorf("access_denied: user does not have access")
}
return dashboard, nil
}
func (s *serviceImpl) GetDashboards(projectId int, userID uint64) (*models.GetDashboardsResponse, error) {
sql := `
SELECT d.dashboard_id, d.user_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned, u.email AS owner_email, u.name AS owner_name
FROM dashboards d
LEFT JOIN users u ON d.user_id = u.user_id
WHERE (d.is_public = true OR d.user_id = $1) AND d.user_id IS NOT NULL AND d.deleted_at IS NULL AND d.project_id = $2
ORDER BY d.dashboard_id`
rows, err := s.pgconn.Query(sql, userID, projectId)
if err != nil {
return nil, err
}
defer rows.Close()
var dashboards []models.Dashboard
for rows.Next() {
var dashboard models.Dashboard
err := rows.Scan(&dashboard.DashboardID, &dashboard.UserID, &dashboard.ProjectID, &dashboard.Name, &dashboard.Description, &dashboard.IsPublic, &dashboard.IsPinned, &dashboard.OwnerEmail, &dashboard.OwnerName)
if err != nil {
return nil, err
}
dashboards = append(dashboards, dashboard)
}
if err := rows.Err(); err != nil {
return nil, err
}
return &models.GetDashboardsResponse{
Dashboards: dashboards,
}, nil
}
// GetDashboardsPaginated Fetch dashboards with pagination
func (s *serviceImpl) GetDashboardsPaginated(projectId int, userID uint64, req *models.GetDashboardsRequest) (*models.GetDashboardsResponsePaginated, error) {
baseSQL, args := buildBaseQuery(projectId, userID, req)
// Count total dashboards
countSQL := fmt.Sprintf("SELECT COUNT(*) FROM (%s) AS count_query", baseSQL)
var total uint64
err := s.pgconn.QueryRow(countSQL, args...).Scan(&total)
if err != nil {
return nil, fmt.Errorf("error counting dashboards: %w", err)
}
// Fetch paginated dashboards
paginatedSQL := fmt.Sprintf("%s ORDER BY %s %s LIMIT $%d OFFSET $%d",
baseSQL, getOrderBy(req.OrderBy), getOrder(req.Order), len(args)+1, len(args)+2)
args = append(args, req.Limit, req.Limit*(req.Page-1))
rows, err := s.pgconn.Query(paginatedSQL, args...)
if err != nil {
return nil, fmt.Errorf("error fetching paginated dashboards: %w", err)
}
defer rows.Close()
var dashboards []models.Dashboard
for rows.Next() {
var dashboard models.Dashboard
err := rows.Scan(
&dashboard.DashboardID,
&dashboard.UserID,
&dashboard.ProjectID,
&dashboard.Name,
&dashboard.Description,
&dashboard.IsPublic,
&dashboard.IsPinned,
&dashboard.OwnerEmail,
&dashboard.OwnerName,
)
if err != nil {
return nil, fmt.Errorf("error scanning dashboard: %w", err)
}
dashboards = append(dashboards, dashboard)
}
return &models.GetDashboardsResponsePaginated{
Dashboards: dashboards,
Total: total,
}, nil
}
// UpdateDashboard Update a dashboard
func (s *serviceImpl) UpdateDashboard(projectId int, dashboardID int, userID uint64, req *models.UpdateDashboardRequest) (*models.GetDashboardResponse, error) {
sql := `
UPDATE dashboards
SET name = $1, description = $2, is_public = $3, is_pinned = $4
WHERE dashboard_id = $5 AND project_id = $6 AND user_id = $7 AND deleted_at IS NULL
RETURNING dashboard_id, project_id, user_id, name, description, is_public, is_pinned`
dashboard := &models.GetDashboardResponse{}
err := s.pgconn.QueryRow(sql, req.Name, req.Description, req.IsPublic, req.IsPinned, dashboardID, projectId, userID).Scan(
&dashboard.DashboardID,
&dashboard.ProjectID,
&dashboard.UserID,
&dashboard.Name,
&dashboard.Description,
&dashboard.IsPublic,
&dashboard.IsPinned,
)
if err != nil {
return nil, fmt.Errorf("error updating dashboard: %w", err)
}
return dashboard, nil
}
// DeleteDashboard Soft-delete a dashboard
func (s *serviceImpl) DeleteDashboard(projectId int, dashboardID int, userID uint64) error {
sql := `
UPDATE dashboards
SET deleted_at = now()
WHERE dashboard_id = $1 AND project_id = $2 AND user_id = $3 AND deleted_at IS NULL`
err := s.pgconn.Exec(sql, dashboardID, projectId, userID)
if err != nil {
return fmt.Errorf("error deleting dashboard: %w", err)
}
return nil
}
// Helper to build the base query for dashboards
func buildBaseQuery(projectId int, userID uint64, req *models.GetDashboardsRequest) (string, []interface{}) {
var conditions []string
args := []interface{}{projectId}
conditions = append(conditions, "d.project_id = $1")
// Handle is_public filter
if req.IsPublic {
conditions = append(conditions, "d.is_public = true")
} else {
conditions = append(conditions, fmt.Sprintf("(d.is_public = true OR d.user_id = $%d)", len(args)+1))
args = append(args, userID)
}
// Handle search query
if req.Query != "" {
idx := len(args) + 1
conditions = append(conditions, fmt.Sprintf("(d.name ILIKE $%d OR d.description ILIKE $%d)", idx, idx))
args = append(args, "%"+req.Query+"%")
}
conditions = append(conditions, "d.deleted_at IS NULL")
whereClause := "WHERE " + strings.Join(conditions, " AND ")
baseSQL := fmt.Sprintf(`
SELECT d.dashboard_id, d.user_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned,
u.email AS owner_email, u.name AS owner_name
FROM dashboards d
LEFT JOIN users u ON d.user_id = u.user_id
%s`, whereClause)
return baseSQL, args
}
func getOrderBy(orderBy string) string {
if orderBy == "" {
return "d.dashboard_id"
}
allowed := map[string]bool{"dashboard_id": true, "name": true, "description": true}
if allowed[orderBy] {
return fmt.Sprintf("d.%s", orderBy)
}
return "d.dashboard_id"
}
func getOrder(order string) string {
if order == "DESC" {
return "DESC"
}
return "ASC"
}
func (s *serviceImpl) CardsExist(projectId int, cardIDs []int) (bool, error) {
sql := `
SELECT COUNT(*) FROM public.metrics
WHERE project_id = $1 AND metric_id = ANY($2)
`
var count int
err := s.pgconn.QueryRow(sql, projectId, cardIDs).Scan(&count)
if err != nil {
return false, err
}
return count == len(cardIDs), nil
}
func (s *serviceImpl) AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *models.AddCardToDashboardRequest) error {
_, err := s.GetDashboard(projectId, dashboardId, userId)
if err != nil {
return fmt.Errorf("failed to get dashboard: %w", err)
}
// Check if all cards exist
exists, err := s.CardsExist(projectId, req.MetricIDs)
if err != nil {
return fmt.Errorf("failed to check card existence: %w", err)
}
if !exists {
return errors.New("not_found: one or more cards do not exist")
}
// Begin a transaction
tx, err := s.pgconn.Begin() // Start transaction
if err != nil {
return fmt.Errorf("failed to start transaction: %w", err)
}
ctx := context.Background()
defer func() {
// Rollback after a successful Commit is a no-op, so this safely cleans up
// whenever any step before the explicit Commit below fails.
_ = tx.Rollback(ctx)
}()
// Insert metrics into dashboard_widgets
insertedWidgets := 0
for _, metricID := range req.MetricIDs {
// Check if the widget already exists
var exists bool
err := tx.QueryRow(ctx, `
SELECT EXISTS (
SELECT 1 FROM public.dashboard_widgets
WHERE dashboard_id = $1 AND metric_id = $2
)
`, dashboardId, metricID).Scan(&exists)
if err != nil {
return fmt.Errorf("failed to check existing widget: %w", err)
}
if exists {
continue // Skip duplicates
}
// Insert new widget
_, err = tx.Exec(ctx, `
INSERT INTO public.dashboard_widgets (dashboard_id, metric_id, user_id, config)
VALUES ($1, $2, $3, $4)
`, dashboardId, metricID, userId, req.Config)
if err != nil {
return fmt.Errorf("failed to insert widget: %w", err)
}
insertedWidgets++
}
// Commit transaction
if err := tx.Commit(ctx); err != nil {
return fmt.Errorf("failed to commit transaction: %w", err)
}
return nil
}
func (s *serviceImpl) DeleteCardFromDashboard(dashboardId int, cardId int) error {
sql := `DELETE FROM public.dashboard_widgets WHERE dashboard_id = $1 AND metric_id = $2`
err := s.pgconn.Exec(sql, dashboardId, cardId)
if err != nil {
return fmt.Errorf("failed to delete card from dashboard: %w", err)
}
return nil
}

View file

@ -20,6 +20,7 @@ type sentryConfig struct {
OrganizationSlug string `json:"organization_slug"`
ProjectSlug string `json:"project_slug"`
Token string `json:"token"`
URL string `json:"url"`
}
type SentryEvent struct {
@ -46,13 +47,19 @@ func (s *sentryClient) FetchSessionData(credentials interface{}, sessionID uint6
if val, ok := strCfg["token"].(string); ok {
cfg.Token = val
}
if val, ok := strCfg["url"].(string); ok {
cfg.URL = val
}
}
requestUrl := fmt.Sprintf("https://sentry.io/api/0/projects/%s/%s/events/", cfg.OrganizationSlug, cfg.ProjectSlug)
if cfg.URL == "" {
cfg.URL = "https://sentry.io" // Default to hosted Sentry if not specified
}
requestUrl := fmt.Sprintf("%s/api/0/projects/%s/%s/issues/", cfg.URL, cfg.OrganizationSlug, cfg.ProjectSlug)
testCallLimit := 1
params := url.Values{}
if sessionID != 0 {
params.Add("query", fmt.Sprintf("openReplaySession.id=%d", sessionID))
params.Add("query", fmt.Sprintf("openReplaySession.id:%d", sessionID))
} else {
params.Add("per_page", fmt.Sprintf("%d", testCallLimit))
}

View file

@ -12,12 +12,14 @@ import { toast } from 'react-toastify';
import DocLink from 'Shared/DocLink/DocLink';
interface SentryConfig {
url: string;
organization_slug: string;
project_slug: string;
token: string;
}
const initialValues = {
url: 'https://sentry.io',
organization_slug: '',
project_slug: '',
token: '',
@ -39,6 +41,9 @@ function SentryForm({
removeMutation,
} = useIntegration<SentryConfig>('sentry', siteId, initialValues);
const { values, errors, handleChange, hasErrors, checkErrors, } = useForm(data, {
url: {
required: false,
},
organization_slug: {
required: true,
},
@ -95,12 +100,19 @@ function SentryForm({
/>
<Loader loading={isPending}>
<FormField
label="URL"
name="url"
value={values.url}
onChange={handleChange}
errors={errors.url}
/>
<FormField
label="Organization Slug"
name="organization_slug"
value={values.organization_slug}
onChange={handleChange}
errors={errors.url}
errors={errors.organization_slug}
autoFocus
/>
<FormField

View file

@ -13,7 +13,8 @@ type warningsType =
| 'memoryWarning'
| 'lowDiskSpace'
| 'isLowPowerModeEnabled'
| 'batteryLevel';
| 'batteryLevel'
| 'background';
const elements = {
thermalState: {
@ -36,12 +37,16 @@ const elements = {
title: 'Low Battery',
icon: 'battery',
},
background: {
title: 'In Background',
icon: 'eye-slash'
},
} as const;
function PerfWarnings({ userDevice }: { userDevice: string }) {
const { store } = React.useContext(MobilePlayerContext);
const { uiPlayerStore } = useStore();
const { scale, performanceListNow, performanceList } = store.get()
const { scale, performanceListNow, performanceList, inBackground } = store.get()
const bottomBlock = uiPlayerStore.bottomBlock;
const allElements = Object.keys(elements) as warningsType[];
const list = React.useMemo(() => allElements
@ -65,7 +70,7 @@ function PerfWarnings({ userDevice }: { userDevice: string }) {
const activeWarnings = React.useMemo(() => {
const warnings: warningsType[] = []
performanceListNow.forEach((warn: IosPerformanceEvent & { techName: warningsType }) => {
performanceListNow.forEach((warn: MobilePerformanceEvent & { techName: warningsType }) => {
switch (warn.techName) {
case 'thermalState':
if (warn.value > 1) warnings.push(warn.techName) // 2 = serious 3 = overheating
@ -84,13 +89,22 @@ function PerfWarnings({ userDevice }: { userDevice: string }) {
break;
}
})
return warnings
}, [performanceListNow.length]);
if (bottomBlock !== NONE) return null;
return (
<div style={contStyles}>
{inBackground ? (
<div
className={cn(
'transition-all flex items-center gap-1 bg-white border rounded px-2 py-1',
'opacity-100',
)}
>
<Icon name={elements.background.icon} size={16} />
<span>{elements.background.title}</span>
</div>
) : null}
{list.map((w) => (
<div
className={cn(

View file

@ -37,7 +37,7 @@ function isElasticLog(log: any): boolean {
}
function isSentryLog(log: any): boolean {
return log && log[0].id && log[0].message && log[0].title;
return log && log.length > 0 && 'id' in log[0] && 'message' in log[0] && 'title' in log[0];
}
function processDynatraceLog(log: any): UnifiedLog {

View file

@ -35,9 +35,9 @@ export default class GQLDetails extends React.PureComponent {
</div>
</div>
<div style={{ height: 'calc(100vh - 364px)', overflowY: 'auto' }}>
<div style={{ height: 'calc(100vh - 264px)', overflowY: 'auto' }} className={'border-t border-t-gray-light mt-2 py-2'}>
<div>
<div className="flex justify-between items-start mt-6 mb-2">
<div className="flex justify-between items-start mb-2">
<h5 className="mt-1 mr-1">{'Variables'}</h5>
</div>
<div className={dataClass}>

View file

@ -37,6 +37,7 @@ const IMG = 'img';
const MEDIA = 'media';
const OTHER = 'other';
const WS = 'websocket';
const GRAPHQL = 'graphql';
const TYPE_TO_TAB = {
[ResourceType.XHR]: XHR,
@ -47,9 +48,10 @@ const TYPE_TO_TAB = {
[ResourceType.MEDIA]: MEDIA,
[ResourceType.WS]: WS,
[ResourceType.OTHER]: OTHER,
[ResourceType.GRAPHQL]: GRAPHQL,
};
const TAP_KEYS = [ALL, XHR, JS, CSS, IMG, MEDIA, OTHER, WS] as const;
const TAP_KEYS = [ALL, XHR, JS, CSS, IMG, MEDIA, OTHER, WS, GRAPHQL] as const;
export const NETWORK_TABS = TAP_KEYS.map((tab) => ({
text: tab === 'xhr' ? 'Fetch/XHR' : tab,
key: tab,

View file

@ -12,6 +12,7 @@ import Lists, {
INITIAL_STATE as LISTS_INITIAL_STATE,
State as ListsState,
} from './IOSLists';
import ListWalker from '../common/ListWalker';
import IOSPerformanceTrackManager, {
PerformanceChartPoint,
} from 'Player/mobile/managers/IOSPerformanceTrackManager';
@ -75,6 +76,7 @@ export interface State extends ScreenState, ListsState {
eventCount: number;
updateWarnings: number;
currentSnapshot: TarFile | null;
inBackground: boolean;
}
const userEvents = [
@ -100,6 +102,7 @@ export default class IOSMessageManager implements IMessageManager {
messagesProcessed: false,
messagesLoading: false,
currentSnapshot: null,
inBackground: false,
};
private activityManager: ActivityManager | null = null;
@ -110,6 +113,7 @@ export default class IOSMessageManager implements IMessageManager {
private touchManager: TouchManager;
private lists: Lists;
public snapshotManager: SnapshotManager;
private appFocusTracker = new ListWalker<{tp: 102, time: number, timestamp: number, value: number, name: string}>();
constructor(
private readonly session: Record<string, any>,
@ -198,11 +202,18 @@ export default class IOSMessageManager implements IMessageManager {
const stateToUpdate: Record<string, any> = {};
const lastPerformanceTrackMessage = this.performanceManager.moveGetLast(t);
const lastAppFocusMessage = this.appFocusTracker.moveGetLast(t);
if (lastPerformanceTrackMessage) {
Object.assign(stateToUpdate, {
performanceChartTime: lastPerformanceTrackMessage.time,
});
}
if (lastAppFocusMessage) {
Object.assign(stateToUpdate, {
inBackground: lastAppFocusMessage.value === 1,
});
}
this.touchManager.move(t);
if (
@ -237,6 +248,9 @@ export default class IOSMessageManager implements IMessageManager {
const performanceStats = ['background', 'memoryUsage', 'mainThreadCPU'];
if (performanceStats.includes(msg.name)) {
this.performanceManager.append(msg);
if (msg.name === 'background') {
this.appFocusTracker.append(msg);
}
}
if (performanceWarnings.includes(msg.name)) {
// @ts-ignore

View file

@ -42,6 +42,21 @@ export default class MessageLoader {
this.session = session
}
/**
* TODO: has to be moved out of messageLoader logic somehow
* */
spriteMapSvg: SVGElement | null = null;
potentialSpriteMap: Record<string, any> = {};
domParser: DOMParser | null = null;
createSpriteMap = () => {
if (!this.spriteMapSvg) {
this.domParser = new DOMParser();
this.spriteMapSvg = document.createElementNS("http://www.w3.org/2000/svg", "svg");
this.spriteMapSvg.setAttribute("style", "display: none;");
this.spriteMapSvg.setAttribute("id", "reconstructed-sprite");
}
}
createNewParser(
shouldDecrypt = true,
onMessagesDone: (msgs: PlayerMsg[], file?: string) => void,
@ -78,7 +93,22 @@ export default class MessageLoader {
let artificialStartTime = Infinity;
let startTimeSet = false;
msgs.forEach((msg) => {
msgs.forEach((msg, i) => {
if (msg.tp === MType.SetNodeAttribute) {
if (msg.value.includes('_$OPENREPLAY_SPRITE$_')) {
this.createSpriteMap()
if (!this.domParser) {
return console.error('DOM parser is not initialized?');
}
handleSprites(
this.potentialSpriteMap,
this.domParser,
msg,
this.spriteMapSvg!,
i
);
}
}
if (msg.tp === MType.Redux || msg.tp === MType.ReduxDeprecated) {
if ('actionTime' in msg && msg.actionTime) {
msg.time = msg.actionTime - this.session.startedAt;
@ -293,6 +323,10 @@ export default class MessageLoader {
await Promise.allSettled([restDomFilesPromise, restDevtoolsFilesPromise]);
this.messageManager.onFileReadSuccess();
// no sprites for mobile
if (this.spriteMapSvg && 'injectSpriteMap' in this.messageManager) {
this.messageManager.injectSpriteMap(this.spriteMapSvg);
}
};
loadEFSMobs = async () => {
@ -420,5 +454,27 @@ function findBrokenNodes(nodes: any[]) {
return result;
}
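/**
 * Added note: handleSprites rewrites a SetNodeAttribute value that carries an inline SVG
 * payload marked with _$OPENREPLAY_SPRITE$_. The first time a given payload is seen it is
 * parsed, wrapped in a <symbol> appended to the shared spriteMapSvg element, and cached in
 * potentialSpriteMap; subsequent occurrences are replaced with the cached `#symbol-...`
 * reference instead of the full markup.
 */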
function handleSprites(potentialSpriteMap: Record<string, any>, parser: DOMParser, msg: Record<string, any>, spriteMapSvg: SVGElement, i: number) {
const [_, svgData] = msg.value.split('_$OPENREPLAY_SPRITE$_');
const potentialSprite = potentialSpriteMap[svgData];
if (potentialSprite) {
msg.value = potentialSprite;
} else {
const svgDoc = parser.parseFromString(svgData, "image/svg+xml");
const originalSvg = svgDoc.querySelector("svg");
if (originalSvg) {
const symbol = document.createElementNS("http://www.w3.org/2000/svg", "symbol");
const symbolId = `symbol-${msg.id || 'ind-' + i}`; // Generate an ID if missing
symbol.setAttribute("id", symbolId);
symbol.setAttribute("viewBox", originalSvg.getAttribute("viewBox") || "0 0 24 24");
symbol.innerHTML = originalSvg.innerHTML;
spriteMapSvg.appendChild(symbol);
msg.value = `#${symbolId}`;
potentialSpriteMap[svgData] = `#${symbolId}`;
}
}
}
// @ts-ignore
window.searchOrphans = (msgs) => findBrokenNodes(msgs.filter(m => [8,9,10,70].includes(m.tp)));

View file

@ -8,7 +8,7 @@ import ListWalker from '../common/ListWalker';
import MouseMoveManager from './managers/MouseMoveManager';
import ActivityManager from './managers/ActivityManager';
import TabClosingManager from "./managers/TabClosingManager";
import TabClosingManager from './managers/TabClosingManager';
import { MouseThrashing, MType } from './messages';
import type { Message, MouseClick } from './messages';
@ -52,7 +52,7 @@ export interface State extends ScreenState {
};
tabNames: {
[tabId: string]: string;
}
};
domContentLoadedTime?: { time: number; value: number };
domBuildingTime?: number;
@ -99,7 +99,7 @@ export default class MessageManager {
closedTabs: [],
sessionStart: 0,
tabNames: {},
};
};
private clickManager: ListWalker<MouseClick> = new ListWalker();
private mouseThrashingManager: ListWalker<MouseThrashing> = new ListWalker();
@ -128,7 +128,9 @@ export default class MessageManager {
this.mouseMoveManager = new MouseMoveManager(screen);
this.sessionStart = this.session.startedAt;
state.update({ sessionStart: this.sessionStart });
this.activityManager = new ActivityManager(this.session.duration.milliseconds); // only if not-live
this.activityManager = new ActivityManager(
this.session.duration.milliseconds
); // only if not-live
}
public getListsFullState = () => {
@ -139,12 +141,18 @@ export default class MessageManager {
return Object.values(this.tabs)[0].getListsFullState();
};
public injectSpriteMap = (spriteEl: SVGElement) => {
Object.values(this.tabs).forEach((tab) => {
tab.injectSpriteMap(spriteEl)
})
};
public setSession = (session: SessionFilesInfo) => {
this.session = session;
this.sessionStart = this.session.startedAt;
this.state.update({ sessionStart: this.sessionStart });
Object.values(this.tabs).forEach((tab) => tab.setSession(session));
}
};
public updateLists(lists: RawList) {
Object.keys(this.tabs).forEach((tab) => {
@ -198,26 +206,26 @@ export default class MessageManager {
* Scan tab managers for last message ts
* */
public createTabCloseEvents = () => {
const lastMsgArr: [string, number][] = []
const lastMsgArr: [string, number][] = [];
if (this.tabsAmount === 1) {
return this.tabCloseManager.append({
tabId: Object.keys(this.tabs)[0],
time: this.session.durationMs - 100
})
time: this.session.durationMs - 100,
});
}
for (const [tabId, tab] of Object.entries(this.tabs)) {
const { lastMessageTs } = tab
const { lastMessageTs } = tab;
if (lastMessageTs && tabId) {
lastMsgArr.push([tabId, lastMessageTs])
lastMsgArr.push([tabId, lastMessageTs]);
}
}
lastMsgArr.sort((a, b) => a[1] - b[1])
lastMsgArr.sort((a, b) => a[1] - b[1]);
lastMsgArr.forEach(([tabId, lastMessageTs]) => {
this.tabCloseManager.append({ tabId, time: lastMessageTs })
})
}
this.tabCloseManager.append({ tabId, time: lastMessageTs });
});
};
public startLoading = () => {
this.waitingForFiles = true;
@ -238,15 +246,15 @@ export default class MessageManager {
// usually means waiting for messages from live session
if (Object.keys(this.tabs).length === 0) return;
this.activeTabManager.moveReady(t).then(async (tabId) => {
const closeMessage = await this.tabCloseManager.moveReady(t)
const closeMessage = await this.tabCloseManager.moveReady(t);
if (closeMessage) {
const closedTabs = this.tabCloseManager.closedTabs
const closedTabs = this.tabCloseManager.closedTabs;
if (closedTabs.size === this.tabsAmount) {
if (this.session.durationMs - t < 250) {
this.state.update({ closedTabs: Array.from(closedTabs) })
this.state.update({ closedTabs: Array.from(closedTabs) });
}
} else {
this.state.update({ closedTabs: Array.from(closedTabs) })
this.state.update({ closedTabs: Array.from(closedTabs) });
}
}
// Moving mouse and setting :hover classes on ready view
@ -261,7 +269,8 @@ export default class MessageManager {
this.screen.cursor.shake();
}
if (!this.activeTab) {
this.activeTab = this.state.get().currentTab ?? Object.keys(this.tabs)[0];
this.activeTab =
this.state.get().currentTab ?? Object.keys(this.tabs)[0];
}
if (tabId) {
@ -291,8 +300,7 @@ export default class MessageManager {
});
if (
this.waitingForFiles ||
(this.lastMessageTime <= t &&
t < this.session.durationMs)
(this.lastMessageTime <= t && t < this.session.durationMs)
) {
this.setMessagesLoading(true);
}
@ -318,7 +326,12 @@ export default class MessageManager {
if (msg.tp === 9999) return;
if (!this.tabs[msg.tabId]) {
this.tabsAmount++;
this.state.update({ tabStates: { ...this.state.get().tabStates, [msg.tabId]: TabSessionManager.INITIAL_STATE } });
this.state.update({
tabStates: {
...this.state.get().tabStates,
[msg.tabId]: TabSessionManager.INITIAL_STATE,
},
});
this.tabs[msg.tabId] = new TabSessionManager(
this.session,
this.state,
@ -368,7 +381,11 @@ export default class MessageManager {
switch (msg.tp) {
case MType.CreateDocument:
if (!this.firstVisualEventSet) {
this.activeTabManager.unshift({ tp: MType.TabChange, tabId: msg.tabId, time: 0 });
this.activeTabManager.unshift({
tp: MType.TabChange,
tabId: msg.tabId,
time: 0,
});
this.state.update({
firstVisualEvent: msg.time,
currentTab: msg.tabId,
@ -387,9 +404,11 @@ export default class MessageManager {
this.updateChangeEvents();
}
this.screen.display(!messagesLoading);
const cssLoading = Object.values(this.state.get().tabStates).some((tab) => tab.cssLoading);
const isReady = !messagesLoading && !cssLoading
this.state.update({ messagesLoading, ready: isReady});
const cssLoading = Object.values(this.state.get().tabStates).some(
(tab) => tab.cssLoading
);
const isReady = !messagesLoading && !cssLoading;
this.state.update({ messagesLoading, ready: isReady });
};
decodeMessage(msg: Message) {

View file

@ -181,7 +181,7 @@ export default class Screen {
getElementFromInternalPoint({ x, y }: Point): Element | null {
// elementFromPoint && elementsFromPoint require viewport-related coordinates,
// not document-related
// not document-related
return this.document?.elementFromPoint(x, y) || null;
}

View file

@ -121,6 +121,10 @@ export default class TabSessionManager {
return this.pagesManager.getNode(id);
};
public injectSpriteMap = (spriteMapEl: SVGElement) => {
this.pagesManager.injectSpriteMap(spriteMapEl);
}
public updateLists(lists: Partial<InitialLists>) {
Object.keys(lists).forEach((key: 'event' | 'stack' | 'exceptions') => {
const currentList = this.lists.lists[key];

View file

@ -7,7 +7,7 @@ import ListWalker from '../../../common/ListWalker';
import StylesManager from './StylesManager';
import FocusManager from './FocusManager';
import SelectionManager from './SelectionManager';
import type { StyleElement } from './VirtualDOM';
import { StyleElement, VSpriteMap } from "./VirtualDOM";
import {
OnloadStyleSheet,
VDocument,
@ -157,6 +157,12 @@ export default class DOMManager extends ListWalker<Message> {
return;
}
const parent = this.vElements.get(parentID) || this.olVRoots.get(parentID);
if ('tagName' in child && child.tagName === 'BODY') {
const spriteMap = new VSpriteMap('svg', true, Number.MAX_SAFE_INTEGER - 100, Number.MAX_SAFE_INTEGER - 100);
spriteMap.node.setAttribute('id', 'OPENREPLAY_SPRITES_MAP');
spriteMap.node.setAttribute('style', 'display: none;');
child.insertChildAt(spriteMap, Number.MAX_SAFE_INTEGER - 100);
}
if (!parent) {
logger.error(
`${id} Insert error. Parent vNode ${parentID} not found`,

View file

@ -53,7 +53,7 @@ export abstract class VNode<T extends Node = Node> {
public abstract applyChanges(): void
}
type VChild = VElement | VText
type VChild = VElement | VText | VSpriteMap
abstract class VParent<T extends Node = Node> extends VNode<T>{
/**
*/
@ -140,6 +140,44 @@ export class VShadowRoot extends VParent<ShadowRoot> {
export type VRoot = VDocument | VShadowRoot
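// Virtual node hosting the reconstructed SVG sprite map. It is inserted at a very large child
// index so it stays at the end of <body>, and applyChanges() is a no-op so replayed DOM updates
// never remove it.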
export class VSpriteMap extends VParent<Element> {
parentNode: VParent | null =
null; /** Should be modified only by the parent itself */
private newAttributes: Map<string, string | false> = new Map();
constructor(
readonly tagName: string,
readonly isSVG = true,
public readonly index: number,
private readonly nodeId: number
) {
super();
this.createNode();
}
protected createNode() {
try {
const element = document.createElementNS(
'http://www.w3.org/2000/svg',
this.tagName
);
element.dataset['openreplayId'] = this.nodeId.toString();
return element;
} catch (e) {
console.error(
'Openreplay: Player received invalid html tag',
this.tagName,
e
);
return document.createElement(this.tagName.replace(/[^a-z]/gi, ''));
}
}
applyChanges() {
// this is a hack to prevent the sprite map from being removed from the DOM
return null;
}
}
export class VElement extends VParent<Element> {
parentNode: VParent | null = null /** Should be modified only by the parent itself */
private newAttributes: Map<string, string | false> = new Map()

View file

@ -79,15 +79,41 @@ export default class PagesManager extends ListWalker<DOMManager> {
return this.currentPage?.getNode(id);
}
spriteMapEl: SVGElement | null = null;
injectSpriteMap = (spriteEl: SVGElement) => {
this.spriteMapEl = spriteEl;
this.refreshSprites();
};
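// Polls the replayed document until the #OPENREPLAY_SPRITES_MAP placeholder created by
// DOMManager appears, then copies the reconstructed <symbol> markup into it.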
refreshSprites = () => {
const int = setInterval(() => {
const potential = this.screen.document?.body.querySelector(
'#OPENREPLAY_SPRITES_MAP'
);
if (potential) {
potential.innerHTML = this.spriteMapEl!.innerHTML;
clearInterval(int);
}
}, 250);
}
moveReady(t: number): Promise<void> {
const requiredPage = this.moveGetLast(t);
let changed = false;
if (requiredPage != null) {
this.currentPage?.clearSelectionManager();
this.currentPage = requiredPage;
this.currentPage.reset(); // Otherwise it won't apply create_document
changed = true;
}
if (this.currentPage != null) {
return this.currentPage.moveReady(t);
return this.currentPage.moveReady(t).then(() => {
if (changed && this.spriteMapEl) {
setTimeout(() => {
this.refreshSprites();
}, 0)
}
})
}
return Promise.resolve();
}

View file

@ -105,25 +105,38 @@ export interface IResourceRequest extends IResource {
decodedBodySize?: number,
}
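// For graphql resources, derive a display name from the recorded request body
// (the first `query <Name>` occurrence), falling back to the URL-based name.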
const getGraphqlReqName = (resource: IResource) => {
try {
if (!resource.request) return getResourceName(resource.url)
const req = JSON.parse(resource.request)
const body = JSON.parse(req.body)
return /query (\w+)/.exec(body.query)?.[1] ?? getResourceName(resource.url)
} catch (e) {
return getResourceName(resource.url)
}
}
export const Resource = (resource: IResource) => ({
...resource,
name: getResourceName(resource.url),
isRed: !resource.success || resource.error, //|| resource.score >= RED_BOUND,
isYellow: false, // resource.score < RED_BOUND && resource.score >= YELLOW_BOUND,
})
export const Resource = (resource: IResource) => {
const name = resource.type === 'graphql' ? getGraphqlReqName(resource) : getResourceName(resource.url)
return {
...resource,
name,
isRed: !resource.success || resource.error, //|| resource.score >= RED_BOUND,
isYellow: false, // resource.score < RED_BOUND && resource.score >= YELLOW_BOUND,
}
}
export function getResourceFromResourceTiming(msg: ResourceTiming, sessStart: number) {
// duration might be duration=0 when cached
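// an entry with zero duration, ttfb, header size, encoded size and transferred size most likely never completed, so it is treated as failed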
const success = msg.duration > 0 || msg.encodedBodySize > 0 || msg.transferredSize > 0
const failed = msg.duration === 0 && msg.ttfb === 0 && msg.headerSize === 0 && msg.encodedBodySize === 0 && msg.transferredSize === 0
const type = getResourceType(msg.initiator, msg.url)
return Resource({
...msg,
type,
method: type === ResourceType.FETCH ? ".." : "GET", // should be GET for all non-XHR/Fetch resources, right?
success,
status: success ? '2xx-3xx' : '4xx-5xx',
success: !failed,
status: !failed ? '2xx-3xx' : '4xx-5xx',
time: Math.max(0, msg.timestamp - sessStart)
})
}

View file

@ -1,3 +1,8 @@
AWS_ACCESS_KEY_ID=${COMMON_S3_KEY}
AWS_SECRET_ACCESS_KEY=${COMMON_S3_SECRET}
AWS_ENDPOINT='http://minio:9000'
AWS_REGION='us-east-1'
BUCKET_NAME=mobs
LICENSE_KEY=''
KAFKA_SERVERS='kafka.db.svc.cluster.local:9092'
KAFKA_USE_SSL='false'

View file

@ -43,6 +43,30 @@ spec:
{{- .Values.healthCheck | toYaml | nindent 10}}
{{- end}}
env:
- name: AWS_ACCESS_KEY_ID
{{- if .Values.global.s3.existingSecret }}
valueFrom:
secretKeyRef:
name: {{ .Values.global.s3.existingSecret }}
key: access-key
{{- else }}
value: {{ .Values.global.s3.accessKey }}
{{- end }}
- name: AWS_SECRET_ACCESS_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:
secretKeyRef:
name: {{ .Values.global.s3.existingSecret }}
key: secret-key
{{- else }}
value: {{ .Values.global.s3.secretKey }}
{{- end }}
- name: AWS_ENDPOINT
value: '{{ .Values.global.s3.endpoint }}'
- name: AWS_REGION
value: '{{ .Values.global.s3.region }}'
- name: BUCKET_NAME
value: {{ .Values.global.s3.recordingsBucket }}
- name: JWT_SECRET
value: '{{ .Values.global.jwtSecret }}'
- name: LICENSE_KEY

View file

@ -30,16 +30,50 @@ export const recordGraphQL = tracker.use(createGraphqlMiddleware());
### Relay
If you're using [Relay network tools](https://github.com/relay-tools/react-relay-network-modern),
you can simply [create a middleware](https://github.com/relay-tools/react-relay-network-modern/tree/master?tab=readme-ov-file#example-of-injecting-networklayer-with-middlewares-on-the-client-side)
you can simply [create a middleware](https://github.com/relay-tools/react-relay-network-modern/tree/master?tab=readme-ov-file#example-of-injecting-networklayer-with-middlewares-on-the-client-side) (async based); otherwise this requires wrapping the fetch function with an Observable.
```js
import { createRelayMiddleware } from '@openreplay/tracker-graphql';
import { Observable } from 'relay-runtime';
const trackerMiddleware = tracker.use(createRelayMiddleware());
const withTracker = tracker.use(createRelayMiddleware())
function createFetchObservable(operation, variables) {
return Observable.create(sink => {
fetch(`YOUR URL`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ query: operation.text, variables }),
})
.then(response => {
if (!response.ok) {
throw new Error(response.statusText);
}
return response.json();
})
.then(data => {
sink.next(data);
sink.complete();
})
.catch(error => {
sink.error(error);
})
});
}
const network = Network.create(withTracker(createFetchObservable));
const environment = new Environment({
network,
store: new Store(new RecordSource()),
});
```
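Both factories take an optional sanitizer (`Sanitizer<Record<string, any>>`); in the Observable wrapper it is applied to the variables and response data before they are recorded. A minimal sketch, with hypothetical field names to scrub:
```js
import { createRelayMiddleware } from '@openreplay/tracker-graphql';
// hypothetical sanitizer: strip sensitive fields before they are recorded
const sanitize = (data) => {
  const { password, token, ...rest } = data;
  return rest;
};
const sanitizedMiddleware = tracker.use(createRelayMiddleware(sanitize));
```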
```js
import { createRelayToolsMiddleware } from '@openreplay/tracker-graphql';
const trackerMiddleware = tracker.use(createRelayToolsMiddleware());
const network = new RelayNetworkLayer([
// your middleware
// ,
trackerMiddleware,
]);
```

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-graphql",
"description": "Tracker plugin for GraphQL requests recording",
"version": "4.1.0",
"version": "4.2.0",
"keywords": [
"graphql",
"logging",
@ -31,7 +31,6 @@
"typescript": "^5.3.3"
},
"dependencies": {
"@apollo/client": "^3.9.5",
"@types/zen-observable": "^0.8.7",
"zen-observable": "^0.10.0"
}

View file

@ -1,10 +1,11 @@
import createTrackerLink from './apolloMiddleware.js';
import createRelayMiddleware from './relayMiddleware.js';
import { createRelayObserver, createRelayMiddleware } from './relayMiddleware.js';
import createGraphqlMiddleware from './graphqlMiddleware.js';
import { Sanitizer } from './types.js';
export {
createTrackerLink,
createRelayObserver,
createRelayMiddleware,
createGraphqlMiddleware,
Sanitizer,

View file

@ -1,6 +1,95 @@
import { App, Messages } from '@openreplay/tracker';
import type { Middleware, RelayRequest } from './relaytypes';
import { Sanitizer } from './types';
import Observable from 'zen-observable';
interface GraphQLOperation {
name: string;
operationKind: string;
text?: string | null;
}
interface GraphQLVariables {
[key: string]: any;
}
interface GraphQLCacheConfig {
[key: string]: any;
}
interface FetchFunction {
(
operation: GraphQLOperation,
variables: GraphQLVariables,
cacheConfig: GraphQLCacheConfig,
uploadables?: any
): Observable<unknown>;
}
function safeStringify(value: unknown) {
try {
return JSON.stringify(value);
} catch {
// If we can't stringify (e.g., cyclic object), return a placeholder
return '"[unserializable]"';
}
}
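// Wraps a Relay fetch function so that each operation's result (or error) is reported to the
// tracker as a GraphQL message with the operation kind, name, sanitized variables, response
// payload and duration; responses containing data.errors are reported with an "ERROR:" prefix.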
function createRelayObserver(sanitizer?: Sanitizer<Record<string, any>>) {
return (app: App | null) => {
return (originalFetch: FetchFunction) => (operation: GraphQLOperation, variables: GraphQLVariables, cacheConfig: GraphQLCacheConfig, uploadables?: any): Observable<any> => {
const startTime = Date.now();
const observable = originalFetch(operation, variables, cacheConfig, uploadables);
if (!app || !app.active()) {
return observable;
}
return new Observable(observer =>
observable.subscribe({
next: (data: any) => {
const duration = Date.now() - startTime;
const opName = operation.name;
const opKind = operation.operationKind;
const vars = JSON.stringify(sanitizer ? sanitizer(variables) : variables);
if (data.errors && data.errors.length > 0) {
const opResp = safeStringify(sanitizer ? sanitizer(data.errors) : data.errors);
app.send(Messages.GraphQL(
opKind,
`ERROR: ${opName}`,
vars,
opResp,
duration
));
} else {
const opResp = safeStringify(sanitizer ? sanitizer(data) : data);
app.send(Messages.GraphQL(
opKind,
opName,
vars,
opResp,
duration
));
}
observer.next(data);
},
error: err => {
const duration = Date.now() - startTime;
const opName = 'ERROR: ' + operation.name;
const opKind = operation.operationKind;
const vars = safeStringify(sanitizer ? sanitizer(variables) : variables);
const opResp = safeStringify(err);
app.send(Messages.GraphQL(opKind, opName, vars, opResp, duration));
observer.error(err);
},
complete: () => {
observer.complete();
}
})
)
}
}
}
const createRelayMiddleware = (sanitizer?: Sanitizer<Record<string, any>>) => {
return (app: App | null): Middleware => {
@ -52,4 +141,4 @@ function getMessage(
return Messages.GraphQL(opKind, opName, vars, opResp, duration);
}
export default createRelayMiddleware;
export { createRelayMiddleware, createRelayObserver };

View file

@ -3,11 +3,12 @@
"noImplicitThis": true,
"strictNullChecks": true,
"alwaysStrict": true,
"target": "es6",
"module": "es6",
"target": "es2020",
"module": "ESNext",
"moduleResolution": "node",
"declaration": true,
"outDir": "./lib",
"allowSyntheticDefaultImports": true
}
},
"include": ["src/**/*.ts"],
}

View file

@ -1,3 +1,8 @@
## 15.0.4
- support for spritemaps (SVG with `use` tags)
- improvements to missing-resource tracking
## 15.0.3
- fixing `failuresOnly` option for network

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "15.0.3",
"version": "15.0.4",
"keywords": [
"logging",
"replay"

View file

@ -4,7 +4,13 @@ export function isNode(sth: any): sth is Node {
}
export function isSVGElement(node: Element): node is SVGElement {
return node.namespaceURI === 'http://www.w3.org/2000/svg'
return (
node.namespaceURI === 'http://www.w3.org/2000/svg' || node.localName === 'svg'
)
}
export function isUseElement(node: Element): node is SVGUseElement {
return node.localName === 'use'
}
export function isElementNode(node: Node): node is Element {

View file

@ -9,6 +9,7 @@ import {
MoveNode,
RemoveNode,
UnbindNodes,
SetNodeAttribute,
} from '../messages.gen.js'
import App from '../index.js'
import {
@ -16,10 +17,78 @@ import {
isTextNode,
isElementNode,
isSVGElement,
isUseElement,
hasTag,
isCommentNode,
} from '../guards.js'
const iconCache: Record<string, { paths: string; vbox: string } | string> = {}
const domParser = new DOMParser()
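// Resolves the sprite referenced by a <use> element: fetches the SVG file from its href,
// extracts the target <symbol>, and returns it as inline paths, standalone SVG text or a
// base64 data URL depending on `mode`, caching the result per symbol id.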
async function parseUseEl(useElement: SVGUseElement, mode: 'inline' | 'dataurl' | 'svgtext') {
try {
const href = useElement.getAttribute('xlink:href') || useElement.getAttribute('href')
if (!href) {
console.debug('Openreplay: xlink:href or href not found on <use>.')
return
}
const [url, symbolId] = href.split('#')
if (!url || !symbolId) {
console.debug('Openreplay: Invalid xlink:href or href found on <use>.')
return
}
if (iconCache[symbolId]) {
return iconCache[symbolId]
}
const response = await fetch(url)
const svgText = await response.text()
const svgDoc = domParser.parseFromString(svgText, 'image/svg+xml')
const symbol = svgDoc.getElementById(symbolId)
if (!symbol) {
console.debug('Openreplay: Symbol not found in SVG.')
return
}
if (mode === 'inline') {
const res = { paths: symbol.innerHTML, vbox: symbol.getAttribute('viewBox') || '0 0 24 24' }
iconCache[symbolId] = res
return res
}
if (mode === 'svgtext') {
const inlineSvg = `
<svg xmlns="http://www.w3.org/2000/svg" viewBox="${symbol.getAttribute('viewBox') || '0 0 24 24'}">
${symbol.innerHTML}
</svg>
`.trim()
iconCache[symbolId] = inlineSvg
return inlineSvg
}
if (mode === 'dataurl') {
const inlineSvg = `
<svg xmlns="http://www.w3.org/2000/svg" viewBox="${symbol.getAttribute('viewBox') || '0 0 24 24'}">
${symbol.innerHTML}
</svg>
`
const encodedSvg = btoa(inlineSvg)
const dataUrl = `data:image/svg+xml;base64,${encodedSvg}`
iconCache[symbolId] = dataUrl
return dataUrl
}
console.debug(`Openreplay: Unknown mode: ${mode}. Use "inline", "svgtext" or "dataurl".`)
} catch (error) {
console.error('Openreplay: Error processing <use> element:', error)
}
}
function isIgnored(node: Node): boolean {
if (isCommentNode(node)) {
return true
@ -146,8 +215,8 @@ export default abstract class Observer {
{
acceptNode: (node) =>
isIgnored(node) || this.app.nodes.getID(node) === undefined
? NodeFilter.FILTER_REJECT
: NodeFilter.FILTER_ACCEPT,
? NodeFilter.FILTER_REJECT
: NodeFilter.FILTER_ACCEPT,
},
// @ts-ignore
false,
@ -178,13 +247,28 @@ export default abstract class Observer {
}
if (value === null) {
this.app.send(RemoveNodeAttribute(id, name))
} else if (name === 'href') {
if (value.length > 1e5) {
}
if (isUseElement(node) && name === 'href') {
parseUseEl(node, 'svgtext')
.then((svgData) => {
if (svgData) {
this.app.send(SetNodeAttribute(id, name, `_$OPENREPLAY_SPRITE$_${svgData}`))
}
})
.catch((e: any) => {
console.error('Openreplay: Error parsing <use> element:', e)
})
return
}
if (name === 'href') {
if (value!.length > 1e5) {
value = ''
}
this.app.send(SetNodeAttributeURLBased(id, name, value, this.app.getBaseHref()))
this.app.send(SetNodeAttributeURLBased(id, name, value!, this.app.getBaseHref()))
} else {
this.app.attributeSender.sendSetAttribute(id, name, value)
this.app.attributeSender.sendSetAttribute(id, name, value!)
}
return
}

View file

@ -121,6 +121,24 @@ export default function (app: App, opts: Partial<Options>): void {
if (shouldSkip) {
return
}
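// treat a resource as failed if it never reached responseEnd or if it transferred and decoded zero bytes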
const failed = entry.responseEnd === 0
|| (entry.transferSize === 0 && entry.decodedBodySize === 0)
if (failed) {
app.send(
ResourceTiming(
entry.startTime + getTimeOrigin(),
0,
0,
0,
0,
0,
entry.name,
entry.initiatorType,
0,
true,
),
)
}
app.send(
ResourceTiming(
entry.startTime + getTimeOrigin(),