Compare commits

51 commits: main...assist_wit
| SHA1 |
|---|
| 0db952e764 |
| fb58f3f155 |
| eb2968435a |
| 9e070e4981 |
| 269b960035 |
| cb4e318650 |
| 60b4225159 |
| a3893ff8ff |
| b09e2d1fc9 |
| 83294b1980 |
| 3e82da1a26 |
| 5cceea06ce |
| c5bc804990 |
| b72839f89f |
| e04c2aa251 |
| e6eb41536d |
| 4b3ad60565 |
| 90669b0604 |
| f4bf1b8960 |
| 70423c6d8e |
| ae313c17d4 |
| 0e45fa53ad |
| fe20f83130 |
| d04e6686ca |
| 6adb45e15f |
| a1337faeee |
| 7e065ab02f |
| 1e2dde09b4 |
| 3cdfe76134 |
| 39855651d5 |
| dd469d2349 |
| 3d448320bf |
| 7b0771a581 |
| 988b396223 |
| fa3b585785 |
| 91e0ebeb56 |
| 8e68eb9a20 |
| 13bd3d9121 |
| 048ae0913c |
| 73fff8b817 |
| 605fa96a34 |
| 2cb33d7894 |
| 15d427418d |
| ed3e553726 |
| 7eace68de6 |
| 8009882cef |
| 7365d8639c |
| 4c967d4bc1 |
| 3fdf799bd7 |
| 9aca716e6b |
| cf9ecdc9a4 |
112 changed files with 2283 additions and 1591 deletions
api/Pipfile (11 changes)
```diff
@@ -6,16 +6,15 @@ name = "pypi"
 [packages]
 urllib3 = "==2.3.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.37.16"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
 clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+elasticsearch = "==8.17.2"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
-fastapi = "==0.115.8"
+cachetools = "==5.5.2"
+fastapi = "==0.115.11"
 uvicorn = {extras = ["standard"], version = "==0.34.0"}
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.6"}
```
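The dependency change above drops the native-protocol `clickhouse-driver` in favor of the HTTP-based `clickhouse-connect`. A minimal sketch of what the swap means at a call site (host and port are illustrative, not taken from this PR):

```python
import clickhouse_connect

# clickhouse-connect speaks HTTP (default port 8123) and returns a QueryResult;
# clickhouse-driver spoke the native TCP protocol (default port 9000) and
# returned plain tuples plus separate column metadata.
client = clickhouse_connect.get_client(host="localhost", port=8123)
result = client.query("SELECT 1 AS one")
rows = [dict(zip(result.column_names, row)) for row in result.result_rows]
# rows == [{"one": 1}] -- the same dict-shaping that transform_result() in the
# rewritten ch_client.py (further down) applies to every query.
```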
```diff
@@ -16,7 +16,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, core_dynamic_crons
 from routers import core, core_dynamic
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

 loglevel = config("LOGLEVEL", default=logging.WARNING)
 print(f">Loglevel set to: {loglevel}")
@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)

-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/pa")
+app.include_router(product_analytics.app, prefix="/pa")
+app.include_router(product_analytics.app_apikey, prefix="/pa")
```
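The second hunk both fixes the `product_anaytics` misspelling and mounts the new routers under a `/pa` prefix. A minimal sketch of the FastAPI prefix semantics (the router and handler here are illustrative, not from this PR):

```python
from fastapi import APIRouter, FastAPI

router = APIRouter()

@router.get("/events/names")
def names():
    return {"data": []}

app = FastAPI()
app.include_router(router, prefix="/pa")
# The handler is now served at /pa/events/names instead of /events/names,
# so all product-analytics endpoints move under a common URL namespace.
```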
```diff
@@ -1,14 +0,0 @@ (file deleted)
```
```python
from chalicelib.utils.ch_client import ClickHouseClient


def search_events(project_id: int, data: dict):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT *
               FROM taha.events
               WHERE project_id=%(project_id)s
               ORDER BY created_at;""",
            params={"project_id": project_id})
        x = ch_client.execute(r)

        return x
```
api/chalicelib/core/product_analytics/__init__.py (new file, empty)

api/chalicelib/core/product_analytics/events.py (new file, 108 lines)
```diff
@@ -0,0 +1,108 @@ (new file)
```
```python
import logging

import schemas
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.ch_client import ClickHouseClient

logger = logging.getLogger(__name__)


def get_events(project_id: int):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT event_name, display_name
               FROM product_analytics.all_events
               WHERE project_id=%(project_id)s
               ORDER BY display_name;""",
            parameters={"project_id": project_id})
        x = ch_client.execute(r)

        return helper.list_to_camel_case(x)


def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
    with ClickHouseClient() as ch_client:
        full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                     "projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}

        constraints = ["project_id = %(projectId)s",
                       "created_at >= toDateTime(%(startDate)s/1000)",
                       "created_at <= toDateTime(%(endDate)s/1000)"]
        for i, f in enumerate(data.filters):
            f.value = helper.values_for_operator(value=f.value, op=f.operator)
            f_k = f"f_value{i}"
            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
            op = sh.get_sql_operator(f.operator)
            is_any = sh.isAny_opreator(f.operator)
            is_undefined = sh.isUndefined_operator(f.operator)
            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
            if f.is_predefined:
                column = f.name
            else:
                column = f"properties.{f.name}"

            if is_any:
                condition = f"isNotNull({column})"
            elif is_undefined:
                condition = f"isNull({column})"
            else:
                condition = sh.multi_conditions(f"{column} {op} %({f_k})s", f.value, value_key=f_k)
            constraints.append(condition)

        ev_constraints = []
        for i, e in enumerate(data.events):
            e_k = f"e_value{i}"
            full_args = {**full_args, e_k: e.event_name}
            condition = f"`$event_name` = %({e_k})s"
            sub_conditions = []
            if len(e.properties.filters) > 0:
                for j, f in enumerate(e.properties.filters):
                    p_k = f"e_{i}_p_{j}"
                    full_args = {**full_args, **sh.multi_values(f.value, value_key=p_k)}
                    if f.is_predefined:
                        sub_condition = f"{f.name} {op} %({p_k})s"
                    else:
                        sub_condition = f"properties.{f.name} {op} %({p_k})s"
                    sub_conditions.append(sh.multi_conditions(sub_condition, f.value, value_key=p_k))
            if len(sub_conditions) > 0:
                condition += " AND ("
                for j, c in enumerate(sub_conditions):
                    if j > 0:
                        condition += " " + e.properties.operators[j - 1] + " " + c
                    else:
                        condition += c
                condition += ")"

            ev_constraints.append(condition)

        constraints.append("(" + " OR ".join(ev_constraints) + ")")
        query = ch_client.format(
            f"""SELECT COUNT(1) OVER () AS total,
                       event_id,
                       `$event_name`,
                       created_at,
                       `distinct_id`,
                       `$browser`,
                       `$import`,
                       `$os`,
                       `$country`,
                       `$state`,
                       `$city`,
                       `$screen_height`,
                       `$screen_width`,
                       `$source`,
                       `$user_id`,
                       `$device`
                FROM product_analytics.events
                WHERE {" AND ".join(constraints)}
                ORDER BY created_at
                LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters=full_args)
        rows = ch_client.execute(query)
        if len(rows) == 0:
            return {"total": 0, "rows": [], "src": 2}
        total = rows[0]["total"]
        for r in rows:
            r.pop("total")
        return {"total": total, "rows": rows, "src": 2}
```
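A hedged usage sketch of the new module; the schema field names come from `api/schemas/product_analytics.py` later in this diff, while the project id and timestamps are illustrative:

```python
import time

import schemas
from chalicelib.core.product_analytics import events

now_ms = int(time.time() * 1000)
payload = schemas.EventsSearchPayloadSchema(
    startTimestamp=now_ms - 24 * 60 * 60 * 1000,  # last 24 hours
    endTimestamp=now_ms,
    events=[schemas.EventSearchSchema(event_name="click")],  # events are OR-joined
    filters=[])                                              # filters are AND-joined

result = events.search_events(project_id=1, data=payload)
# -> {"total": <int>, "rows": [<event dicts>], "src": 2}
```

Note the pagination convention: `offset` is computed as `(page - 1) * limit`, so page 1 starts at offset 0.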
api/chalicelib/core/product_analytics/properties.py (new file, 19 lines)
```diff
@@ -0,0 +1,19 @@ (new file)
```
```python
from chalicelib.utils import helper
from chalicelib.utils.ch_client import ClickHouseClient


def get_properties(project_id: int, event_name):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT all_properties.property_name,
                      all_properties.display_name
               FROM product_analytics.event_properties
                        INNER JOIN product_analytics.all_properties USING (property_name)
               WHERE event_properties.project_id=%(project_id)s
                 AND all_properties.project_id=%(project_id)s
                 AND event_properties.event_name=%(event_name)s
               ORDER BY created_at;""",
            parameters={"project_id": project_id, "event_name": event_name})
        properties = ch_client.execute(r)

        return helper.list_to_camel_case(properties)
```
```diff
@@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
-from . import sessions_pg
+from . import sessions_pg as sessions_legacy
 from . import sessions_ch
-from . import sessions_search_pg
+from . import sessions_search_pg as sessions_search_legacy

-if config("EXP_METRICS", cast=bool, default=False):
-    from . import sessions_ch as sessions
-else:
-    from . import sessions_pg as sessions
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+    logger.info(">>> Using experimental sessions search")
+    from . import sessions_ch as sessions
+    from . import sessions_search_ch as sessions_search
+else:
+    from . import sessions_pg as sessions
+    from . import sessions_search_pg as sessions_search
+
+# if config("EXP_METRICS", cast=bool, default=False):
+#     from . import sessions_ch as sessions
+# else:
+#     from . import sessions_pg as sessions
```
```diff
@@ -671,24 +671,36 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
             events_conditions.append({"type": event_where[-1]})
             if not is_any:
                 if schemas.ClickEventExtraOperator.has_value(event.operator):
-                    event_where.append(json_condition(
-                        "main",
-                        "$properties",
-                        "selector", op, event.value, e_k)
-                    )
+                    # event_where.append(json_condition(
+                    #     "main",
+                    #     "$properties",
+                    #     "selector", op, event.value, e_k)
+                    # )
+                    event_where.append(
+                        sh.multi_conditions(f"main.`$properties`.selector {op} %({e_k})s",
+                                            event.value, value_key=e_k)
+                    )
                     events_conditions[-1]["condition"] = event_where[-1]
                 else:
                     if is_not:
-                        event_where.append(json_condition(
-                            "sub", "$properties", _column, op, event.value, e_k
-                        ))
+                        # event_where.append(json_condition(
+                        #     "sub", "$properties", _column, op, event.value, e_k
+                        # ))
+                        event_where.append(
+                            sh.multi_conditions(f"sub.`$properties`.{_column} {op} %({e_k})s",
+                                                event.value, value_key=e_k)
+                        )
                         events_conditions_not.append(
                             {
                                 "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
                         events_conditions_not[-1]["condition"] = event_where[-1]
                     else:
+                        # event_where.append(
+                        #     json_condition("main", "$properties", _column, op, event.value, e_k)
+                        # )
                         event_where.append(
-                            json_condition("main", "$properties", _column, op, event.value, e_k)
+                            sh.multi_conditions(f"main.`$properties`.{_column} {op} %({e_k})s",
+                                                event.value, value_key=e_k)
                         )
                         events_conditions[-1]["condition"] = event_where[-1]
             else:
```
```diff
@@ -870,12 +882,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 events_conditions[-1]["condition"] = []
                 if not is_any and event.value not in [None, "*", ""]:
                     event_where.append(
-                        sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
-                                            event.value, value_key=e_k))
+                        sh.multi_conditions(
+                            f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
+                            event.value, value_key=e_k))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     events_extra_join += f" AND {event_where[-1]}"
                 if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
-                    event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
+                    event_where.append(
+                        sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
+                                            value_key=s_k))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     events_extra_join += f" AND {event_where[-1]}"
```
```diff
@@ -1193,6 +1208,28 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
             else:
                 continue
+            if event.properties is not None and len(event.properties.filters) > 0:
+                event_fiters = []
+                for l, property in enumerate(event.properties.filters):
+                    a_k = f"{e_k}_att_{l}"
+                    full_args = {**full_args,
+                                 **sh.multi_values(property.value, value_key=a_k)}
+                    op = sh.get_sql_operator(property.operator)
+                    condition = f"main.properties.{property.name} {op} %({a_k})s"
+                    if property.is_predefined:
+                        condition = f"main.{property.name} {op} %({a_k})s"
+                    event_where.append(
+                        sh.multi_conditions(condition, property.value, value_key=a_k)
+                    )
+                    event_fiters.append(event_where[-1])
+                if len(event_fiters) > 0:
+                    events_conditions[-1]["condition"] += " AND ("
+                    for l, e_f in enumerate(event_fiters):
+                        if l > 0:
+                            events_conditions[-1]["condition"] += event.properties.operators[l - 1] + e_f
+                        else:
+                            events_conditions[-1]["condition"] += e_f
+                    events_conditions[-1]["condition"] += ")"

             if event_index == 0 or or_events:
                 event_where += ss_constraints
                 if is_not:
```
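The hunks above lean heavily on `sh.multi_values`/`sh.multi_conditions`. A hedged re-implementation of the pattern (the real helpers live in `chalicelib/utils/sql_helper.py`; this sketch only makes the parameter fan-out explicit and may differ in detail):

```python
def multi_values(values, value_key="value"):
    # One bind parameter per value: {"e_k_0": v0, "e_k_1": v1, ...}
    return {f"{value_key}_{i}": v for i, v in enumerate(values)}


def multi_conditions(condition, values, value_key="value"):
    # Expand one templated condition into an OR-joined group, one clause per value.
    return "(" + " OR ".join(condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
                             for i in range(len(values))) + ")"


# multi_conditions("main.`$properties`.selector = %(e_k)s", ["a", "b"], value_key="e_k")
# -> "(main.`$properties`.selector = %(e_k_0)s OR main.`$properties`.selector = %(e_k_1)s)"
```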
```diff
@@ -1,6 +1,5 @@
-import ast
 import logging
 from typing import List, Union

 import schemas
 from chalicelib.core import events, metadata, projects
```
```diff
@@ -141,7 +141,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                    ) AS users_sessions;""",
                             full_args)
     elif ids_only:
-        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
+        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
                                           {query_part}
                                           ORDER BY s.session_id desc
                                           LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",
```
```diff
@@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                 ORDER BY sort_key {data.order}
                                 LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                             parameters=full_args)
-    logging.debug("--------------------")
-    logging.debug(main_query)
-    logging.debug("--------------------")
+
     try:
+        logging.debug("--------------------")
         sessions_list = cur.execute(main_query)
+        logging.debug("--------------------")
     except Exception as err:
         logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
         logging.warning(main_query)
```
```diff
@@ -122,7 +122,10 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
     sort = 'session_id'
     if data.sort is not None and data.sort != "session_id":
         # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
-        sort = helper.key_to_snake_case(data.sort)
+        if data.sort == 'datetime':
+            sort = 'start_ts'
+        else:
+            sort = helper.key_to_snake_case(data.sort)

     meta_keys = metadata.get(project_id=project.project_id)
     main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
```
```diff
@@ -11,9 +11,3 @@ if smtp.has_smtp():
     logger.info("valid SMTP configuration found")
 else:
     logger.info("no SMTP configuration found or SMTP validation failed")
-
-if config("EXP_CH_DRIVER", cast=bool, default=True):
-    logging.info(">>> Using new CH driver")
-    from . import ch_client_exp as ch_client
-else:
-    from . import ch_client
```
```diff
@@ -1,73 +1,185 @@
 import logging
+import threading
+import time
+from functools import wraps
+from queue import Queue, Empty

-import clickhouse_driver
+import clickhouse_connect
+from clickhouse_connect.driver.query import QueryContext
 from decouple import config

 logger = logging.getLogger(__name__)

+_CH_CONFIG = {"host": config("ch_host"),
+              "user": config("ch_user", default="default"),
+              "password": config("ch_password", default=""),
+              "port": config("ch_port_http", cast=int),
+              "client_name": config("APP_NAME", default="PY")}
+CH_CONFIG = dict(_CH_CONFIG)
+
 settings = {}
 if config('ch_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
+    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
     settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

 if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
+    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
     settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

+extra_args = {}
+if config("CH_COMPRESSION", cast=bool, default=True):
+    extra_args["compression"] = "lz4"
+
+
+def transform_result(self, original_function):
+    @wraps(original_function)
+    def wrapper(*args, **kwargs):
+        if kwargs.get("parameters"):
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
+            else:
+                logger.debug(
+                    str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
+        elif len(args) > 0:
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(args[0])
+            else:
+                logger.debug(str.encode(args[0]))
+        result = original_function(*args, **kwargs)
+        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
+            column_names = result.column_names
+            result = result.result_rows
+            result = [dict(zip(column_names, row)) for row in result]
+
+        return result
+
+    return wrapper
+
+
+class ClickHouseConnectionPool:
+    def __init__(self, min_size, max_size):
+        self.min_size = min_size
+        self.max_size = max_size
+        self.pool = Queue()
+        self.lock = threading.Lock()
+        self.total_connections = 0
+
+        # Initialize the pool with min_size connections
+        for _ in range(self.min_size):
+            client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                   database=config("ch_database", default="default"),
+                                                   settings=settings,
+                                                   **extra_args)
+            self.pool.put(client)
+            self.total_connections += 1
+
+    def get_connection(self):
+        try:
+            # Try to get a connection without blocking
+            client = self.pool.get_nowait()
+            return client
+        except Empty:
+            with self.lock:
+                if self.total_connections < self.max_size:
+                    client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                           database=config("ch_database", default="default"),
+                                                           settings=settings,
+                                                           **extra_args)
+                    self.total_connections += 1
+                    return client
+            # If max_size reached, wait until a connection is available
+            client = self.pool.get()
+            return client
+
+    def release_connection(self, client):
+        self.pool.put(client)
+
+    def close_all(self):
+        with self.lock:
+            while not self.pool.empty():
+                client = self.pool.get()
+                client.close()
+            self.total_connections = 0
+
+
+CH_pool: ClickHouseConnectionPool = None
+
+RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
+RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
+RETRY = 0
+
+
+def make_pool():
+    if not config('CH_POOL', cast=bool, default=True):
+        return
+    global CH_pool
+    global RETRY
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
+    try:
+        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
+                                           max_size=config("CH_MAXCONN", cast=int, default=8))
+        if CH_pool is not None:
+            logger.info("Connection pool created successfully for CH")
+    except ConnectionError as error:
+        logger.error("Error while connecting to CH", exc_info=error)
+        if RETRY < RETRY_MAX:
+            RETRY += 1
+            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+            time.sleep(RETRY_INTERVAL)
+            make_pool()
+        else:
+            raise error
+
+
 class ClickHouseClient:
     __client = None

     def __init__(self, database=None):
-        extra_args = {}
-        if config("CH_COMPRESSION", cast=bool, default=True):
-            extra_args["compression"] = "lz4"
-        self.__client = clickhouse_driver.Client(host=config("ch_host"),
-                                                 database=database if database else config("ch_database",
-                                                                                           default="default"),
-                                                 user=config("ch_user", default="default"),
-                                                 password=config("ch_password", default=""),
-                                                 port=config("ch_port", cast=int),
-                                                 settings=settings,
-                                                 **extra_args) \
-            if self.__client is None else self.__client
+        if self.__client is None:
+            if database is not None or not config('CH_POOL', cast=bool, default=True):
+                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                              database=database if database else config("ch_database",
+                                                                                                        default="default"),
+                                                              settings=settings,
+                                                              **extra_args)
+
+            else:
+                self.__client = CH_pool.get_connection()
+
+        self.__client.execute = transform_result(self, self.__client.query)
+        self.__client.format = self.format

     def __enter__(self):
-        return self
+        return self.__client

-    def execute(self, query, parameters=None, **args):
-        try:
-            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
-            keys = tuple(x for x, y in results[1])
-            return [dict(zip(keys, i)) for i in results[0]]
-        except Exception as err:
-            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
-            logger.error("--------- CH QUERY EXCEPTION -----------")
-            logger.error(self.format(query=query, parameters=parameters)
-                         .replace('\n', '\\n')
-                         .replace('    ', ' ')
-                         .replace('  ', ' '))
-            logger.error("--------------------")
-            raise err
-
-    def insert(self, query, params=None, **args):
-        return self.__client.execute(query=query, params=params, **args)
-
-    def client(self):
-        return self.__client
-
-    def format(self, query, parameters):
-        if parameters is None:
-            return query
-        return self.__client.substitute_params(query, parameters, self.__client.connection.context)
+    def format(self, query, parameters=None):
+        if parameters:
+            ctx = QueryContext(query=query, parameters=parameters)
+            return ctx.final_query
+        return query

     def __exit__(self, *args):
-        pass
+        if config('CH_POOL', cast=bool, default=True):
+            CH_pool.release_connection(self.__client)
+        else:
+            self.__client.close()


 async def init():
-    logger.info(f">CH_POOL:not defined")
+    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
+    if config('CH_POOL', cast=bool, default=True):
+        make_pool()


 async def terminate():
-    pass
+    global CH_pool
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+            logger.info("Closed all connexions to CH")
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
```
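A hedged sketch of how the rewritten client is consumed elsewhere in this PR (it mirrors the `events.py`/`properties.py` call pattern above; the query and parameters are illustrative):

```python
from chalicelib.utils.ch_client import ClickHouseClient

# When CH_POOL=true (the default), entering the context borrows a pooled
# clickhouse-connect client; __exit__ returns it to the pool instead of closing.
with ClickHouseClient() as ch_client:
    q = ch_client.format("""SELECT event_name
                            FROM product_analytics.all_events
                            WHERE project_id=%(project_id)s;""",
                         parameters={"project_id": 1})
    rows = ch_client.execute(q)  # transform_result() reshapes rows into dicts
```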
```diff
@@ -1,177 +0,0 @@ (file deleted; its pooled clickhouse-connect implementation was merged into ch_client.py above)
```
```python
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty

import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config

logger = logging.getLogger(__name__)

_CH_CONFIG = {"host": config("ch_host"),
              "user": config("ch_user", default="default"),
              "password": config("ch_password", default=""),
              "port": config("ch_port_http", cast=int),
              "client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)

settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

if config('ch_receive_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
    extra_args["compression"] = "lz4"


def transform_result(self, original_function):
    @wraps(original_function)
    def wrapper(*args, **kwargs):
        logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
        result = original_function(*args, **kwargs)
        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
            column_names = result.column_names
            result = result.result_rows
            result = [dict(zip(column_names, row)) for row in result]

        return result

    return wrapper


class ClickHouseConnectionPool:
    def __init__(self, min_size, max_size):
        self.min_size = min_size
        self.max_size = max_size
        self.pool = Queue()
        self.lock = threading.Lock()
        self.total_connections = 0

        # Initialize the pool with min_size connections
        for _ in range(self.min_size):
            client = clickhouse_connect.get_client(**CH_CONFIG,
                                                   database=config("ch_database", default="default"),
                                                   settings=settings,
                                                   **extra_args)
            self.pool.put(client)
            self.total_connections += 1

    def get_connection(self):
        try:
            # Try to get a connection without blocking
            client = self.pool.get_nowait()
            return client
        except Empty:
            with self.lock:
                if self.total_connections < self.max_size:
                    client = clickhouse_connect.get_client(**CH_CONFIG,
                                                           database=config("ch_database", default="default"),
                                                           settings=settings,
                                                           **extra_args)
                    self.total_connections += 1
                    return client
            # If max_size reached, wait until a connection is available
            client = self.pool.get()
            return client

    def release_connection(self, client):
        self.pool.put(client)

    def close_all(self):
        with self.lock:
            while not self.pool.empty():
                client = self.pool.get()
                client.close()
            self.total_connections = 0


CH_pool: ClickHouseConnectionPool = None

RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0


def make_pool():
    if not config('CH_POOL', cast=bool, default=True):
        return
    global CH_pool
    global RETRY
    if CH_pool is not None:
        try:
            CH_pool.close_all()
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
    try:
        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                           max_size=config("CH_MAXCONN", cast=int, default=8))
        if CH_pool is not None:
            logger.info("Connection pool created successfully for CH")
    except ConnectionError as error:
        logger.error("Error while connecting to CH", exc_info=error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


class ClickHouseClient:
    __client = None

    def __init__(self, database=None):
        if self.__client is None:
            if database is not None or not config('CH_POOL', cast=bool, default=True):
                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
                                                              database=database if database else config("ch_database",
                                                                                                        default="default"),
                                                              settings=settings,
                                                              **extra_args)

            else:
                self.__client = CH_pool.get_connection()

        self.__client.execute = transform_result(self, self.__client.query)
        self.__client.format = self.format

    def __enter__(self):
        return self.__client

    def format(self, query, *, parameters=None):
        if parameters is None:
            return query
        return query % {
            key: f"'{value}'" if isinstance(value, str) else value
            for key, value in parameters.items()
        }

    def __exit__(self, *args):
        if config('CH_POOL', cast=bool, default=True):
            CH_pool.release_connection(self.__client)
        else:
            self.__client.close()


async def init():
    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
    if config('CH_POOL', cast=bool, default=True):
        make_pool()


async def terminate():
    global CH_pool
    if CH_pool is not None:
        try:
            CH_pool.close_all()
            logger.info("Closed all connexions to CH")
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
```
```diff
@@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
 EXP_ALERTS=true
 EXP_ERRORS_SEARCH=true
 EXP_METRICS=true
+EXP_SESSIONS_SEARCH=true
```
```diff
@@ -1,591 +0,0 @@ (file deleted)
```
```sql
-- -- Original Q3
-- WITH ranked_events AS (SELECT *
--                        FROM ranked_events_1736344377403),
--      n1 AS (SELECT event_number_in_session,
--                    event_type,
--                    e_value,
--                    next_type,
--                    next_value,
--                    COUNT(1) AS sessions_count
--             FROM ranked_events
--             WHERE event_number_in_session = 1
--               AND isNotNull(next_value)
--             GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n2 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n1
--                            INNER JOIN ranked_events AS re
--                                       ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 2
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n3 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n2
--                            INNER JOIN ranked_events AS re
--                                       ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 3
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n4 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n3
--                            INNER JOIN ranked_events AS re
--                                       ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 4
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n5 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n4
--                            INNER JOIN ranked_events AS re
--                                       ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 5
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8)
-- SELECT *
-- FROM (SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n1
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n2
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n3
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n4
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n5) AS chart_steps
-- ORDER BY event_number_in_session;

-- Q1
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
                        FROM experimental.events AS events
                        WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
                          AND events.project_id = toUInt16(65)
                          AND events.datetime >= toDateTime(1735599600000 / 1000)
                          AND events.datetime < toDateTime(1736290799999 / 1000)
                        GROUP BY 1),
      pre_ranked_events AS (SELECT *
                            FROM (SELECT session_id,
                                         event_type,
                                         datetime,
                                         url_path AS e_value,
                                         row_number() OVER (PARTITION BY session_id
                                             ORDER BY datetime,
                                                 message_id) AS event_number_in_session
                                  FROM experimental.events AS events
                                           INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
                                  WHERE events.project_id = toUInt16(65)
                                    AND events.datetime >= toDateTime(1735599600000 / 1000)
                                    AND events.datetime < toDateTime(1736290799999 / 1000)
                                    AND (events.event_type = 'LOCATION')
                                    AND events.datetime >= initial_event.start_event_timestamp
                                  ) AS full_ranked_events
                            WHERE event_number_in_session <= 5)
 SELECT *
 FROM pre_ranked_events);
;

SELECT *
FROM pre_ranked_events_1736344377403
WHERE event_number_in_session < 3;


-- ---------Q2-----------
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
DROP TABLE ranked_events_1736344377403;
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
(WITH pre_ranked_events AS (SELECT *
                            FROM pre_ranked_events_1736344377403),
      start_points AS (SELECT DISTINCT session_id
                       FROM pre_ranked_events
                       WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
                         AND pre_ranked_events.event_number_in_session = 1),
      ranked_events AS (SELECT pre_ranked_events.*,
                               leadInFrame(e_value)
                                           OVER (PARTITION BY session_id ORDER BY datetime
                                               ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
                               leadInFrame(toNullable(event_type))
                                           OVER (PARTITION BY session_id ORDER BY datetime
                                               ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
                        FROM start_points
                                 INNER JOIN pre_ranked_events USING (session_id))
 SELECT *
 FROM ranked_events);


-- ranked events
SELECT event_number_in_session,
       event_type,
       e_value,
       next_type,
       next_value,
       COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
--   AND e_value='/en/deployment/deploy-docker/'
--   AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
--   AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
  AND isNotNull(next_value)
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY event_number_in_session, sessions_count DESC;


SELECT event_number_in_session,
       event_type,
       e_value,
       COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value
ORDER BY event_number_in_session, sessions_count DESC;

SELECT COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
  AND isNull(next_value)
;

-- ---------Q3 MORE -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     drop_n AS (-- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                'DROP' AS next_type,
                NULL AS next_value,
                sessions_count
         FROM n1
         WHERE isNull(n1.next_type)
         UNION ALL
         -- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                'DROP' AS next_type,
                NULL AS next_value,
                sessions_count
         FROM n2
         WHERE isNull(n2.next_type)),
     -- TODO: make this as top_steps, where every step will go to next as top/others
     top_n1 AS (-- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n1
         WHERE isNotNull(next_type)
         ORDER BY sessions_count DESC
         LIMIT 3),
     top_n2 AS (-- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n2
         WHERE (event_type, e_value) IN (SELECT event_type,
                                                e_value
                                         FROM n2
                                         WHERE isNotNull(next_type)
                                         GROUP BY event_type, e_value
                                         ORDER BY SUM(sessions_count) DESC
                                         LIMIT 3)
         ORDER BY sessions_count DESC),
     top_n AS (SELECT *
               FROM top_n1
               UNION ALL
               SELECT *
               FROM top_n2),
     u_top_n AS (SELECT DISTINCT event_number_in_session,
                                 event_type,
                                 e_value
                 FROM top_n),
     others_n AS (
         -- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n1
         WHERE isNotNull(next_type)
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3
         UNION ALL
         -- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n2
         WHERE isNotNull(next_type)
         -- GROUP BY event_number_in_session, event_type, e_value
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3)
SELECT *
FROM (
     -- Top
     SELECT *
     FROM top_n
     -- UNION ALL
     -- -- Others
     -- SELECT event_number_in_session,
     --        event_type,
     --        e_value,
     --        'OTHER' AS next_type,
     --        NULL AS next_value,
     --        SUM(sessions_count)
     -- FROM others_n
     -- GROUP BY event_number_in_session, event_type, e_value
     -- UNION ALL
     -- -- Top go to Drop
     -- SELECT drop_n.event_number_in_session,
     --        drop_n.event_type,
     --        drop_n.e_value,
     --        drop_n.next_type,
     --        drop_n.next_value,
     --        drop_n.sessions_count
     -- FROM drop_n
     --          INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
     --     AND drop_n.event_type = u_top_n.event_type
     --     AND drop_n.e_value = u_top_n.e_value)
     -- ORDER BY drop_n.event_number_in_session
     -- -- -- UNION ALL
     -- -- -- Top go to Others
     -- SELECT top_n.event_number_in_session,
     --        top_n.event_type,
     --        top_n.e_value,
     --        'OTHER' AS next_type,
     --        NULL AS next_value,
     --        SUM(top_n.sessions_count) AS sessions_count
     -- FROM top_n
     --          LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
     --     AND top_n.next_type = others_n.event_type
     --     AND top_n.next_value = others_n.e_value)
     -- WHERE others_n.event_number_in_session IS NULL
     --   AND top_n.next_type IS NOT NULL
     -- GROUP BY event_number_in_session, event_type, e_value
     -- UNION ALL
     -- -- Others got to Top
     -- SELECT others_n.event_number_in_session,
     --        'OTHER' AS event_type,
     --        NULL AS e_value,
     --        others_n.s_next_type AS next_type,
     --        others_n.s_next_value AS next_value,
     --        SUM(sessions_count) AS sessions_count
     -- FROM others_n
     --          INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
     --                               others_n.s_next_type = top_n.event_type AND
     --                               others_n.s_next_value = top_n.event_type)
     -- GROUP BY others_n.event_number_in_session, next_type, next_value
     -- UNION ALL
     -- -- TODO: find if this works or not
     -- -- Others got to Others
     -- SELECT others_n.event_number_in_session,
     --        'OTHER' AS event_type,
     --        NULL AS e_value,
     --        'OTHERS' AS next_type,
     --        NULL AS next_value,
     --        SUM(sessions_count) AS sessions_count
     -- FROM others_n
     --          LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
     --     AND others_n.s_next_type = u_top_n.event_type
     --     AND others_n.s_next_value = u_top_n.e_value)
     -- WHERE u_top_n.event_number_in_session IS NULL
     -- GROUP BY others_n.event_number_in_session
     )
ORDER BY event_number_in_session;


-- ---------Q3 TOP ON VALUE ONLY -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),

     drop_n AS (-- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                'DROP' AS next_type,
                NULL AS next_value,
                sessions_count
         FROM n1
         WHERE isNull(n1.next_type)
         UNION ALL
         -- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                'DROP' AS next_type,
                NULL AS next_value,
                sessions_count
         FROM n2
         WHERE isNull(n2.next_type)),
     top_n AS (SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n1
               GROUP BY event_number_in_session, event_type, e_value
               LIMIT 1
               UNION ALL
               -- STEP 2
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n2
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3
               UNION ALL
               -- STEP 3
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n3
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3),
     top_n_with_next AS (SELECT n1.*
                         FROM n1
                         UNION ALL
                         SELECT n2.*
                         FROM n2
                                  INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
                             AND n2.event_type = top_n.event_type
                             AND n2.e_value = top_n.e_value)),
     others_n AS (
         -- STEP 2
         SELECT n2.*
         FROM n2
         WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 2)
         UNION ALL
         -- STEP 3
         SELECT n3.*
         FROM n3
         WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 3))
SELECT *
FROM (
     -- SELECT sum(top_n_with_next.sessions_count)
     -- FROM top_n_with_next
     -- WHERE event_number_in_session = 1
     -- -- AND isNotNull(next_value)
     --   AND (next_type, next_value) IN
     --       (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
     -- -- SELECT * FROM others_n
     -- -- SELECT * FROM n2
     -- SELECT *
     -- FROM top_n
     -- );
     -- Top to Top: valid
     SELECT top_n_with_next.*
     FROM top_n_with_next
              INNER JOIN top_n
                         ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
                             AND top_n_with_next.next_type = top_n.event_type
                             AND top_n_with_next.next_value = top_n.e_value)
     UNION ALL
     -- Top to Others: valid
     SELECT top_n_with_next.event_number_in_session,
            top_n_with_next.event_type,
            top_n_with_next.e_value,
            'OTHER' AS next_type,
            NULL AS next_value,
            SUM(top_n_with_next.sessions_count) AS sessions_count
     FROM top_n_with_next
     WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
           (SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
     GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
     UNION ALL
     -- Top go to Drop: valid
     SELECT drop_n.event_number_in_session,
            drop_n.event_type,
            drop_n.e_value,
            drop_n.next_type,
            drop_n.next_value,
            drop_n.sessions_count
     FROM drop_n
              INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
                  AND drop_n.event_type = top_n.event_type
                  AND drop_n.e_value = top_n.e_value)
     ORDER BY drop_n.event_number_in_session
     UNION ALL
     -- Others got to Drop: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            'DROP' AS next_type,
            NULL AS next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
     GROUP BY others_n.event_number_in_session, next_type, next_value
     UNION ALL
     -- Others got to Top: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            others_n.next_type,
            others_n.next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
           (SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
     GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
     UNION ALL
     -- Others got to Others
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            'OTHERS' AS next_type,
            NULL AS next_value,
            SUM(sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
           (SELECT event_number_in_session, event_type, e_value FROM top_n)
     GROUP BY others_n.event_number_in_session)
ORDER BY event_number_in_session, sessions_count DESC;
```
```diff
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
```
```diff
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
```
api/routers/subs/product_analytics.py (new file, 28 lines)
```diff
@@ -0,0 +1,28 @@ (new file)
```
```python
import schemas
from chalicelib.core.product_analytics import events, properties
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers
from fastapi import Body, Depends, BackgroundTasks

public_app, app, app_apikey = get_routers()


@app.get('/{projectId}/properties/search', tags=["product_analytics"])
def get_event_properties(projectId: int, event_name: str = None,
                         context: schemas.CurrentContext = Depends(OR_context)):
    if not event_name or len(event_name) == 0:
        return {"data": []}
    return {"data": properties.get_properties(project_id=projectId, event_name=event_name)}


@app.get('/{projectId}/events/names', tags=["product_analytics"])
def get_all_events(projectId: int,
                   context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.get_events(project_id=projectId)}


@app.post('/{projectId}/events/search', tags=["product_analytics"])
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.search_events(project_id=projectId, data=data)}
```
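A hedged sketch of exercising the new endpoints once the router is mounted under `/pa` (the app import path is an assumption, and the `OR_context` auth dependency is glossed over, so a real request would also need credentials):

```python
from fastapi.testclient import TestClient

from app import app  # assumption: the FastAPI module that mounts these routers

client = TestClient(app)
client.get("/pa/1/events/names")                                     # event catalog for project 1
client.get("/pa/1/properties/search", params={"event_name": "click"})
client.post("/pa/1/events/search",
            json={"startTimestamp": 1735599600000, "endTimestamp": 1736290799999,
                  "events": [{"event_name": "click"}], "filters": []})
```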
```diff
@@ -1,15 +0,0 @@ (file deleted)
```
```python
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers


public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
                  # data: schemas.CreateDashboardSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return product_anaytics2.search_events(project_id=projectId, data={})
```
```diff
@@ -1,2 +1,3 @@
 from .schemas import *
+from .product_analytics import *
 from . import overrides as _overrides
```
api/schemas/product_analytics.py (new file, 19 lines)
```diff
@@ -0,0 +1,19 @@ (new file)
```
```python
from typing import Optional, List

from pydantic import Field

from .overrides import BaseModel
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
    _PaginatedSchema, PropertyFilterSchema


class EventSearchSchema(BaseModel):
    event_name: str = Field(...)
    properties: Optional[EventPropertiesSchema] = Field(default=None)


class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    events: List[EventSearchSchema] = Field(default_factory=list, description="operator between events is OR")
    filters: List[PropertyFilterSchema] = Field(default_factory=list, description="operator between filters is AND")
    sort: str = Field(default="startTs")
    order: SortOrderType = Field(default=SortOrderType.DESC)
```
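A hedged sketch of the JSON body this schema accepts, tying it to the predefined-property logic added to `schemas.py` just below; the values are illustrative and `"is"` is assumed to be a valid `SearchEventOperator` value:

```python
from schemas import EventsSearchPayloadSchema

body = {
    "startTimestamp": 1735599600000,
    "endTimestamp": 1736290799999,
    "events": [{"event_name": "click",
                "properties": {"operators": ["and"],  # two filters need exactly one operator
                               "filters": [{"name": "$browser", "operator": "is", "value": ["Chrome"]},
                                           {"name": "plan", "operator": "is", "value": ["pro"]}]}}],
    "filters": [],
}
payload = EventsSearchPayloadSchema.model_validate(body)
assert payload.events[0].properties.filters[0].is_predefined      # "$browser" is in the enum
assert not payload.events[0].properties.filters[1].is_predefined  # "plan" is a custom property
```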
@@ -545,6 +545,70 @@ class RequestGraphqlFilterSchema(BaseModel):
        return values


class EventPredefinedPropertyType(str, Enum):
    TIME = "$time"
    SOURCE = "$source"
    DURATION_S = "$duration_s"
    DESCRIPTION = "description"
    AUTO_CAPTURED = "$auto_captured"
    SDK_EDITION = "$sdk_edition"
    SDK_VERSION = "$sdk_version"
    DEVICE_ID = "$device_id"
    OS = "$os"
    OS_VERSION = "$os_version"
    BROWSER = "$browser"
    BROWSER_VERSION = "$browser_version"
    DEVICE = "$device"
    SCREEN_HEIGHT = "$screen_height"
    SCREEN_WIDTH = "$screen_width"
    CURRENT_URL = "$current_url"
    INITIAL_REFERRER = "$initial_referrer"
    REFERRING_DOMAIN = "$referring_domain"
    REFERRER = "$referrer"
    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
    SEARCH_ENGINE = "$search_engine"
    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
    UTM_SOURCE = "utm_source"
    UTM_MEDIUM = "utm_medium"
    UTM_CAMPAIGN = "utm_campaign"
    COUNTRY = "$country"
    STATE = "$state"
    CITY = "$city"
    ISSUE_TYPE = "issue_type"
    TAGS = "$tags"
    IMPORT = "$import"


class PropertyFilterSchema(BaseModel):
    name: Union[EventPredefinedPropertyType, str] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)
    value: List[Union[int, str]] = Field(...)
    property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)

    @computed_field
    @property
    def is_predefined(self) -> bool:
        return EventPredefinedPropertyType.has_value(self.name)

    @model_validator(mode="after")
    def transform_name(self):
        if isinstance(self.name, Enum):
            self.name = self.name.value
        return self


class EventPropertiesSchema(BaseModel):
    operators: List[Literal["and", "or"]] = Field(...)
    filters: List[PropertyFilterSchema] = Field(...)

    @model_validator(mode="after")
    def event_filter_validator(self):
        assert len(self.filters) == 0 \
               or len(self.operators) == len(self.filters) - 1, \
            "Number of operators must match the number of filters - 1"
        return self


class SessionSearchEventSchema2(BaseModel):
    is_event: Literal[True] = True
    value: List[Union[str, int]] = Field(...)
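event_filter_validator encodes the usual infix rule: n filters are chained by exactly n-1 boolean connectors. Stated standalone with plain values rather than the schema classes:

# Three filters need exactly two connectors, read left to right:
# filter1 AND filter2 OR filter3
filters = ["filter1", "filter2", "filter3"]
operators = ["and", "or"]
assert len(filters) == 0 or len(operators) == len(filters) - 1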
@@ -553,6 +617,7 @@ class SessionSearchEventSchema2(BaseModel):
    source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
    sourceOperator: Optional[MathOperator] = Field(default=None)
    filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
    properties: Optional[EventPropertiesSchema] = Field(default=None)

    _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
    _single_to_list_values = field_validator('value', mode='before')(single_to_list)
@@ -1529,3 +1594,30 @@ class TagCreate(TagUpdate):

class ScopeSchema(BaseModel):
    scope: int = Field(default=1, ge=1, le=2)


class SessionModel(BaseModel):
    duration: int
    errorsCount: int
    eventsCount: int
    favorite: bool = Field(default=False)
    issueScore: int
    issueTypes: List[IssueType] = Field(default=[])
    metadata: dict = Field(default={})
    pagesCount: int
    platform: str
    projectId: int
    sessionId: str
    startTs: int
    timezone: Optional[str]
    userAnonymousId: Optional[str]
    userBrowser: str
    userCity: str
    userCountry: str
    userDevice: Optional[str]
    userDeviceType: str
    userId: Optional[str]
    userOs: str
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)
@@ -1,13 +1,11 @@
const {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
    getAvailableRooms
    extractPayloadFromRequest
} = require("./helper");

module.exports = {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
    getAvailableRooms
    extractPayloadFromRequest
}
@@ -261,10 +261,6 @@ const uniqueAutocomplete = function (list) {
    return _list;
}

const getAvailableRooms = async function (io) {
    return io.sockets.adapter.rooms;
}

const getCompressionConfig = function () {
    // WS: The theoretical overhead per socket is 19KB (11KB for compressor and 8KB for decompressor)
    let perMessageDeflate = false;

@@ -305,6 +301,5 @@ module.exports = {
    extractPayloadFromRequest,
    sortPaginate,
    uniqueAutocomplete,
    getAvailableRooms,
    getCompressionConfig
};
@@ -27,17 +27,22 @@ const respond = function (req, res, data) {
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        res.cork(() => {
            res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
        });
        if (!res.aborted) {
            res.cork(() => {
                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
            });
        } else {
            logger.debug("response aborted");
            return;
        }
    }
    const duration = performance.now() - req.startTs;
    IncreaseTotalRequests();
    RecordRequestDuration(req.method.toLowerCase(), res.handlerName, 200, duration / 1000.0);
}

const getParticularSession = async function (roomId, filters) {
    let connected_sockets = await fetchSockets(roomId);
const getParticularSession = async function (roomId, filters, all = false) {
    let connected_sockets = await fetchSockets(roomId, all);
    if (connected_sockets.length === 0) {
        return null;
    }

@@ -167,7 +172,7 @@ const socketsLiveBySession = async function (req, res) {

    // find a particular session
    if (_sessionId) {
        let sessInfo = await getParticularSession(`${_projectKey}-${_sessionId}`, filters);
        let sessInfo = await getParticularSession(`${_projectKey}-${_sessionId}`, filters, true);
        return respond(req, res, sessInfo);
    }
    return respond(req, res, null);
@@ -13,7 +13,9 @@ const {
    handleEvent
} = require("./stats");
const {
    getServer
    sendTo,
    sendFrom,
    fetchSockets
} = require('../utils/wsServer');
const {
    IncreaseTotalWSConnections,

@@ -26,9 +28,9 @@ const {
const {logger} = require('./logger');
const deepMerge = require('@fastify/deepmerge')({all: true});

const findSessionSocketId = async (io, roomId, tabId) => {
const findSessionSocketId = async (roomId, tabId) => {
    let pickFirstSession = tabId === undefined;
    const connected_sockets = await io.in(roomId).fetchSockets();
    const connected_sockets = await fetchSockets(roomId);
    for (let socket of connected_sockets) {
        if (socket.handshake.query.identity === IDENTITIES.session) {
            if (pickFirstSession) {

@@ -41,9 +43,9 @@ const findSessionSocketId = async (io, roomId, tabId) => {
    return null;
};

async function getRoomData(io, roomID) {
async function getRoomData(roomID) {
    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
    const connected_sockets = await io.in(roomID).fetchSockets();
    const connected_sockets = await fetchSockets(roomID);
    if (connected_sockets.length > 0) {
        for (let socket of connected_sockets) {
            if (socket.handshake.query.identity === IDENTITIES.session) {

@@ -77,8 +79,7 @@ async function onConnect(socket) {
    IncreaseTotalWSConnections(socket.handshake.query.identity);
    IncreaseOnlineConnections(socket.handshake.query.identity);

    const io = getServer();
    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (socket.handshake.query.identity === IDENTITIES.session) {
        // Check if session with the same tabID already connected, if so, refuse new connexion

@@ -86,7 +87,7 @@ async function onConnect(socket) {
            for (let tab of tabIDs) {
                if (tab === socket.handshake.query.tabId) {
                    logger.debug(`session already connected, refusing new connexion, peerId: ${socket.handshake.query.peerId}`);
                    io.to(socket.id).emit(EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
                    sendTo(socket.id, EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
                    return socket.disconnect();
                }
            }

@@ -100,12 +101,12 @@ async function onConnect(socket) {
        // Inform all connected agents about reconnected session
        if (agentsCount > 0) {
            logger.debug(`notifying new session about agent-existence`);
            io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
            socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
            sendTo(socket.id, EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
            sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
        }
    } else if (tabsCount <= 0) {
        logger.debug(`notifying new agent about no SESSIONS with peerId:${socket.handshake.query.peerId}`);
        io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
        sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }
    await socket.join(socket.handshake.query.roomId);

@@ -118,7 +119,7 @@ async function onConnect(socket) {
            // Stats
            startAssist(socket, socket.handshake.query.agentID);
        }
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
    }

    // Set disconnect handler

@@ -144,13 +145,12 @@ async function onDisconnect(socket) {
    logger.debug(`${socket.id} disconnected from ${socket.handshake.query.roomId}`);

    if (socket.handshake.query.identity === IDENTITIES.agent) {
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
        // Stats
        endAssist(socket, socket.handshake.query.agentID);
    }
    logger.debug("checking for number of connected agents and sessions");
    const io = getServer();
    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (tabsCount === -1 && agentsCount === -1) {
        DecreaseOnlineRooms();

@@ -159,11 +159,11 @@ async function onDisconnect(socket) {
    }
    if (tabsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no SESSIONS`);
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }
    if (agentsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no AGENTS`);
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NO_AGENTS);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_AGENTS);
    }
}

@@ -178,13 +178,12 @@ async function onUpdateEvent(socket, ...args) {
    socket.handshake.query.sessionInfo = deepMerge(socket.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});

    // Update sessionInfo for all agents in the room
    const io = getServer();
    const connected_sockets = await io.in(socket.handshake.query.roomId).fetchSockets();
    const connected_sockets = await fetchSockets(socket.handshake.query.roomId);
    for (let item of connected_sockets) {
        if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
            item.handshake.query.sessionInfo = deepMerge(item.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
        } else if (item.handshake.query.identity === IDENTITIES.agent) {
            socket.to(item.id).emit(EVENTS_DEFINITION.listen.UPDATE_EVENT, args[0]);
            sendFrom(socket, item.id, EVENTS_DEFINITION.emit.UPDATE_EVENT, args[0]);
        }
    }
}

@@ -194,7 +193,7 @@ async function onWebrtcAgentHandler(socket, ...args) {
        const agentIdToConnect = args[0]?.data?.toAgentId;
        logger.debug(`${socket.id} sent webrtc event to agent:${agentIdToConnect}`);
        if (agentIdToConnect && socket.handshake.sessionData.AGENTS_CONNECTED.includes(agentIdToConnect)) {
            socket.to(agentIdToConnect).emit(EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, args[0]);
            sendFrom(socket, agentIdToConnect, EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, args[0]);
        }
    }
}

@@ -207,19 +206,18 @@ async function onAny(socket, eventName, ...args) {
    args[0] = updateSessionData(socket, args[0])
    if (socket.handshake.query.identity === IDENTITIES.session) {
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to room:${socket.handshake.query.roomId}`);
        socket.to(socket.handshake.query.roomId).emit(eventName, args[0]);
        sendFrom(socket, socket.handshake.query.roomId, eventName, args[0]);
    } else {
        // Stats
        handleEvent(eventName, socket, args[0]);
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to session of room:${socket.handshake.query.roomId}`);
        const io = getServer();
        let socketId = await findSessionSocketId(io, socket.handshake.query.roomId, args[0]?.meta?.tabId);
        let socketId = await findSessionSocketId(socket.handshake.query.roomId, args[0]?.meta?.tabId);
        if (socketId === null) {
            logger.debug(`session not found for:${socket.handshake.query.roomId}`);
            io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
            sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
        } else {
            logger.debug("message sent");
            io.to(socketId).emit(eventName, socket.id, args[0]);
            sendTo(socket.id, eventName, socket.id, args[0]);
        }
    }
}

@@ -234,4 +232,4 @@ function updateSessionData(socket, sessionData) {

module.exports = {
    onConnect,
}
}
@@ -3,11 +3,15 @@ const {getCompressionConfig} = require("./helper");

let io;

const getServer = function () {
    return io;
function sendFrom(from, to, eventName, ...data) {
    from.to(to).emit(eventName, ...data);
}

const fetchSockets = async function (roomID) {
function sendTo(to, eventName, ...data) {
    sendFrom(io, to, eventName, ...data);
}

const fetchSockets = async function (roomID, all = false) {
    if (!io) {
        return [];
    }

@@ -35,6 +39,7 @@ const createSocketIOServer = function (server, prefix) {

module.exports = {
    createSocketIOServer,
    getServer,
    sendTo,
    sendFrom,
    fetchSockets,
}
@@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req

    // Add tracker version to context
    r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
    if err := validateTrackerVersion(req.TrackerVersion); err != nil {
        e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
        return
    }

    // Handler's logic
    if req.ProjectKey == nil {

@@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
    // Add projectID to context
    r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))

    // Validate tracker version
    if err := validateTrackerVersion(req.TrackerVersion); err != nil {
        e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
        return
    }

    // Check if the project supports mobile sessions
    if !p.IsWeb() {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)
ee/api/.gitignore (vendored, 9 changes)
@@ -223,11 +223,14 @@ Pipfile.lock
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
/chalicelib/core/sourcemaps
/chalicelib/core/tags.py
/chalicelib/core/product_analytics
/chalicelib/saml
/chalicelib/utils/__init__.py
/chalicelib/utils/args_transformer.py

@@ -290,3 +293,5 @@ Pipfile.lock
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_details.py
/chalicelib/utils/contextual_validators.py
/routers/subs/product_analytics.py
/schemas/product_analytics.py
@@ -6,23 +6,20 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.16"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.11"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20"
redis = "==5.2.1"
azure-storage-blob = "==12.24.1"
@@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
from crons import core_crons, ee_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers import ee
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
from routers.subs import v1_api_ee

if config("ENABLE_SSO", cast=bool, default=True):

@@ -150,9 +150,9 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)

app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/ap")
app.include_router(product_analytics.app, prefix="/ap")
app.include_router(product_analytics.app_apikey, prefix="/ap")

if config("ENABLE_SSO", cast=bool, default=True):
    app.include_router(saml.public_app)
@@ -1,17 +0,0 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search as sessions_search_legacy

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental sessions search")
    from . import sessions_ch as sessions
    from . import sessions_search_exp as sessions_search
else:
    from . import sessions_pg as sessions
    from . import sessions_search as sessions_search
@@ -44,12 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/core/product_analytics
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py

@@ -110,3 +113,5 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_details.py
rm -rf ./chalicelib/utils/contextual_validators.py
rm -rf ./routers/subs/product_analytics.py
rm -rf ./schemas/product_analytics.py
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
python-decouple==3.8
pydantic[email]==2.10.6
apscheduler==3.11.0
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
uvicorn[standard]==0.34.0
gunicorn==23.0.0
python-decouple==3.8
@@ -1,4 +1,5 @@
from .schemas import *
from .schemas_ee import *
from .assist_stats_schema import *
from .product_analytics import *
from . import overrides as _overrides
@@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator

from chalicelib.utils.TimeUTC import TimeUTC
from . import schemas
from .overrides import BaseModel, Enum, ORUnion
from .overrides import BaseModel, Enum
from .transformers_validators import remove_whitespace


@@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
        return values


class SessionModel(BaseModel):
    duration: int
    errorsCount: int
    eventsCount: int
    favorite: bool = Field(default=False)
    issueScore: int
    issueTypes: List[schemas.IssueType] = Field(default=[])
    metadata: dict = Field(default={})
    pagesCount: int
    platform: str
    projectId: int
    sessionId: str
    startTs: int
    timezone: Optional[str]
    userAnonymousId: Optional[str]
    userBrowser: str
    userCity: str
    userCountry: str
    userDevice: Optional[str]
    userDeviceType: str
    userId: Optional[str]
    userOs: str
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)


class AssistRecordUpdatePayloadSchema(BaseModel):
    name: str = Field(..., min_length=1)
    _transform_name = field_validator('name', mode="before")(remove_whitespace)
ee/assist/.gitignore (vendored, 4 changes)
@@ -15,7 +15,7 @@ servers/sourcemaps-server.js
/utils/HeapSnapshot.js
/utils/helper.js
/utils/assistHelper.js
/utils/httpHandlers.js
/utils/socketHandlers.js
#/utils/httpHandlers.js
#/utils/socketHandlers.js
.local
*.mmdb
@@ -83,9 +83,11 @@ if (process.env.uws !== "true") {
    const uWrapper = function (fn) {
        return (res, req) => {
            res.id = 1;
            res.aborted = false;
            req.startTs = performance.now(); // track request's start timestamp
            req.method = req.getMethod();
            res.onAborted(() => {
                res.aborted = true;
                onAbortedOrFinishedResponse(res);
            });
            return fn(req, res);
@@ -21,7 +21,9 @@ const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
const pubClient = createClient({url: REDIS_URL});
pubClient.on("error", (error) => logger.error(`Pub redis client error : ${error}`));
const subClient = pubClient.duplicate();
subClient.on("error", (error) => logger.error(`Sub redis client error : ${error}`));
logger.info(`Using Redis: ${REDIS_URL}`);

const wsRouter = express.Router();

@@ -47,7 +49,7 @@ module.exports = {
    Promise.all([pubClient.connect(), subClient.connect()])
        .then(() => {
            io.adapter(createAdapter(pubClient, subClient,
                {requestsTimeout: process.env.REDIS_REQUESTS_TIMEOUT || 5000}));
                {requestsTimeout: parseInt(process.env.REDIS_REQUESTS_TIMEOUT) || 10000}));
            logger.info("> redis connected.");
        })
        .catch((err) => {
@@ -1,13 +1,11 @@
const {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
    getAvailableRooms
    extractPayloadFromRequest
} = require('../utils/helper-ee');

module.exports = {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
    getAvailableRooms
    extractPayloadFromRequest
}

@@ -91,13 +91,7 @@ const extractPayloadFromRequest = async function (req, res) {
    logger.debug("payload/filters:" + JSON.stringify(filters))
    return Object.keys(filters).length > 0 ? filters : undefined;
}
const getAvailableRooms = async function (io) {
    if (process.env.redis === "true") {
        return io.of('/').adapter.allRooms();
    } else {
        return helper.getAvailableRooms(io);
    }
}

const getCompressionConfig = function () {
    if (process.env.uws !== "true") {
        return helper.getCompressionConfig();

@@ -121,6 +115,5 @@ module.exports = {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
    getCompressionConfig,
    getAvailableRooms
    getCompressionConfig
};
ee/assist/utils/httpHandlers.js (new file, 212 lines)
@@ -0,0 +1,212 @@
const {
    hasFilters,
    hasQuery,
    isValidSession,
    sortPaginate,
    getValidAttributes,
    uniqueAutocomplete
} = require("./helper");
const {
    extractProjectKeyFromRequest,
    extractSessionIdFromRequest,
    extractPayloadFromRequest,
} = require("./extractors");
const {
    RecordRequestDuration,
    IncreaseTotalRequests
} = require('../utils/metrics');
const {fetchSockets, getSessionFromCache} = require("./wsServer");
const {IDENTITIES} = require("./assistHelper");
const {logger} = require('./logger');

const respond = function (req, res, data) {
    logger.debug("responding with data: ", JSON.stringify(data))
    let result = {data}
    if (process.env.uws !== "true") {
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        if (!res.aborted) {
            res.cork(() => {
                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
            });
        } else {
            logger.debug("response aborted");
            return;
        }
    }
    const duration = performance.now() - req.startTs;
    IncreaseTotalRequests();
    RecordRequestDuration(req.method.toLowerCase(), res.handlerName, 200, duration / 1000.0);
}

const getParticularSession = async function (roomId, filters, all = false) {
    let sessInfo = await getSessionFromCache(roomId);
    if (!sessInfo) {
        return null;
    }
    if (!hasFilters(filters)) {
        return sessInfo;
    }
    const result = isValidSession(sessInfo, filters.filter)
    if (result.matched) {
        return sessInfo;
    }
    return null;
}

const getAllSessions = async function (projectKey, filters, counters, onlineOnly = false) {
    const sessions = [];
    const connected_sockets = await fetchSockets();
    if (connected_sockets.length === 0) {
        return sessions;
    }

    const rooms = new Map();
    for (let item of connected_sockets) {
        // Prefilter checks
        if (rooms.has(item.handshake.query.roomId)) {
            continue;
        }
        if (item.handshake.query.projectKey !== projectKey || !item.handshake.query.sessionInfo) {
            continue;
        }
        if (onlineOnly && item.handshake.query.identity !== IDENTITIES.session) {
            continue;
        }

        // Mark this room as visited
        rooms.set(item.handshake.query.roomId, true);

        // Add session to the list without filtering
        if (!hasFilters(filters)) {
            sessions.push(item.handshake.query.sessionInfo);
            continue;
        }

        // Add session to the list if it passes the filter
        const result = isValidSession(item.handshake.query.sessionInfo, filters.filter)
        if (result.matched) {
            sessions.push(item.handshake.query.sessionInfo);
            // Add filter name/value to counter
            for (const [filterName, filterValue] of Object.entries(result.filters)) {
                if (counters[filterName] === undefined) {
                    counters[filterName] = {};
                }
                if (counters[filterName][filterValue] === undefined) {
                    counters[filterName][filterValue] = 0;
                }
                counters[filterName][filterValue] += 1;
            }
        }
    }

    return sessions
}

// Sort by projectKey
const socketsListByProject = async function (req, res) {
    logger.debug("[WS]looking for available sessions");
    res.handlerName = 'socketsListByProject';

    const _projectKey = extractProjectKeyFromRequest(req);
    const _sessionId = extractSessionIdFromRequest(req);
    const filters = await extractPayloadFromRequest(req, res);

    // find a particular session
    if (_sessionId) {
        const sessInfo = await getParticularSession(_sessionId, filters);//(`${_projectKey}-${_sessionId}`, filters);
        return respond(req, res, sessInfo);
    }

    // find all sessions for a project
    const counters = {};
    const sessions = await getAllSessions(_projectKey, filters, counters);

    // send response
    respond(req, res, sortPaginate(sessions, filters, counters));
}

// Sort by projectKey
const socketsLiveByProject = async function (req, res) {
    logger.debug("[WS]looking for available LIVE sessions");
    res.handlerName = 'socketsLiveByProject';

    const _projectKey = extractProjectKeyFromRequest(req);
    const _sessionId = extractSessionIdFromRequest(req);
    const filters = await extractPayloadFromRequest(req, res);

    // find a particular session
    if (_sessionId) {
        let sessInfo = await getParticularSession(_sessionId, filters);//(`${_projectKey}-${_sessionId}`, filters);
        return respond(req, res, sessInfo);
    }

    // find all sessions for a project
    const counters = {};
    const sessions = await getAllSessions(_projectKey, filters, counters, true);

    // send response
    respond(req, res, sortPaginate(sessions, filters, counters));
}

// Sort by roomID (projectKey+sessionId)
const socketsLiveBySession = async function (req, res) {
    logger.debug("[WS]looking for LIVE session");
    res.handlerName = 'socketsLiveBySession';

    const _projectKey = extractProjectKeyFromRequest(req);
    const _sessionId = extractSessionIdFromRequest(req);
    const filters = await extractPayloadFromRequest(req, res);

    // find a particular session
    if (_sessionId) {
        let sessInfo = await getParticularSession(_sessionId, filters);//(`${_projectKey}-${_sessionId}`, filters, true);
        return respond(req, res, sessInfo);
    }
    return respond(req, res, null);
}

// Sort by projectKey
const autocomplete = async function (req, res) {
    logger.debug("[WS]autocomplete");
    res.handlerName = 'autocomplete';

    const _projectKey = extractProjectKeyFromRequest(req);
    const filters = await extractPayloadFromRequest(req);
    let results = [];
    if (!hasQuery(filters)) {
        return respond(req, res, results);
    }

    let connected_sockets = await fetchSockets();
    if (connected_sockets.length === 0) {
        return results;
    }

    const rooms = new Map();
    for (let item of connected_sockets) {
        if (rooms.has(item.handshake.query.roomId)) {
            continue;
        }
        if (item.handshake.query.sessionInfo) {
            if ((item.handshake.query.projectKey !== _projectKey) || (item.handshake.query.identity !== IDENTITIES.session)) {
                continue;
            }
            // Mark this room as visited
            rooms.set(item.handshake.query.roomId, true);
            results.push(...getValidAttributes(item.handshake.query.sessionInfo, filters.query))
        }
    }

    respond(req, res, uniqueAutocomplete(results));
}

module.exports = {
    respond,
    socketsListByProject,
    socketsLiveByProject,
    socketsLiveBySession,
    autocomplete
}
ee/assist/utils/socketHandlers.js (new file, 250 lines)
@@ -0,0 +1,250 @@
const {
    extractPeerId,
} = require("./helper");
const {
    IDENTITIES,
    EVENTS_DEFINITION,
    extractSessionInfo,
    errorHandler
} = require("./assistHelper");
const {
    startAssist,
    endAssist,
    handleEvent
} = require("./stats");
const {
    sendTo,
    sendFrom,
    fetchSockets,
    addSessionToCache,
    getSessionFromCache,
    removeSessionFromCache
} = require('../utils/wsServer');
const {
    IncreaseTotalWSConnections,
    IncreaseOnlineConnections,
    DecreaseOnlineConnections,
    IncreaseTotalRooms,
    IncreaseOnlineRooms,
    DecreaseOnlineRooms,
} = require('../utils/metrics');
const {logger} = require('./logger');
const deepMerge = require('@fastify/deepmerge')({all: true});

const findSessionSocketId = async (roomId, tabId) => {
    let pickFirstSession = tabId === undefined;
    const connected_sockets = await fetchSockets(roomId);
    for (let socket of connected_sockets) {
        if (socket.handshake.query.identity === IDENTITIES.session) {
            if (pickFirstSession) {
                return socket.id;
            } else if (socket.handshake.query.tabId === tabId) {
                return socket.id;
            }
        }
    }
    return null;
};

async function getRoomData(roomID) {
    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
    const connected_sockets = await fetchSockets(roomID);
    if (connected_sockets.length > 0) {
        for (let socket of connected_sockets) {
            if (socket.handshake.query.identity === IDENTITIES.session) {
                tabsCount++;
                tabIDs.push(socket.handshake.query.tabId);
            } else {
                agentsCount++;
                agentIDs.push(socket.id);
            }
        }
    } else {
        tabsCount = -1;
        agentsCount = -1;
    }
    return {tabsCount, agentsCount, tabIDs, agentIDs};
}

function processNewSocket(socket) {
    socket._connectedAt = new Date();
    let {projectKey: connProjectKey, sessionId: connSessionId, tabId: connTabId} = extractPeerId(socket.handshake.query.peerId);
    socket.handshake.query.roomId = `${connProjectKey}-${connSessionId}`;
    socket.handshake.query.projectKey = connProjectKey;
    socket.handshake.query.sessId = connSessionId;
    socket.handshake.query.tabId = connTabId;
    logger.debug(`connProjectKey:${connProjectKey}, connSessionId:${connSessionId}, connTabId:${connTabId}, roomId:${socket.handshake.query.roomId}`);
}

async function onConnect(socket) {
    logger.debug(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
    processNewSocket(socket);
    IncreaseTotalWSConnections(socket.handshake.query.identity);
    IncreaseOnlineConnections(socket.handshake.query.identity);

    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (socket.handshake.query.identity === IDENTITIES.session) {
        // Check if session with the same tabID already connected, if so, refuse new connexion
        if (tabsCount > 0) {
            for (let tab of tabIDs) {
                if (tab === socket.handshake.query.tabId) {
                    logger.debug(`session already connected, refusing new connexion, peerId: ${socket.handshake.query.peerId}`);
                    sendTo(socket.id, EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
                    return socket.disconnect();
                }
            }
        }
        extractSessionInfo(socket);
        if (tabsCount < 0) {
            // New session creates new room
            IncreaseTotalRooms();
            IncreaseOnlineRooms();
        }
        // Inform all connected agents about reconnected session
        if (agentsCount > 0) {
            logger.debug(`notifying new session about agent-existence`);
            sendTo(socket.id, EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
            sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
        }
    } else if (tabsCount <= 0) {
        logger.debug(`notifying new agent about no SESSIONS with peerId:${socket.handshake.query.peerId}`);
        sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }
    await socket.join(socket.handshake.query.roomId);

    // Add session to cache
    if (socket.handshake.query.identity === IDENTITIES.session) {
        await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);
    }

    logger.debug(`${socket.id} joined room:${socket.handshake.query.roomId}, as:${socket.handshake.query.identity}, connections:${agentsCount + tabsCount + 1}`)

    if (socket.handshake.query.identity === IDENTITIES.agent) {
        if (socket.handshake.query.agentInfo !== undefined) {
            socket.handshake.query.agentInfo = JSON.parse(socket.handshake.query.agentInfo);
            socket.handshake.query.agentID = socket.handshake.query.agentInfo.id;
            // Stats
            startAssist(socket, socket.handshake.query.agentID);
        }
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
    }

    // Set disconnect handler
    socket.on('disconnect', () => onDisconnect(socket));

    // Handle update event
    socket.on(EVENTS_DEFINITION.listen.UPDATE_EVENT, (...args) => onUpdateEvent(socket, ...args));

    // Handle webrtc events
    socket.on(EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, (...args) => onWebrtcAgentHandler(socket, ...args));

    // Handle errors
    socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
    socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
    socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));

    // Handle all other events
    socket.onAny((eventName, ...args) => onAny(socket, eventName, ...args));
}

async function onDisconnect(socket) {
    DecreaseOnlineConnections(socket.handshake.query.identity);
    logger.debug(`${socket.id} disconnected from ${socket.handshake.query.roomId}`);

    if (socket.handshake.query.identity === IDENTITIES.agent) {
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
        // Stats
        endAssist(socket, socket.handshake.query.agentID);
    }
    logger.debug("checking for number of connected agents and sessions");
    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);

    if (tabsCount <= 0) {
        await removeSessionFromCache(socket.handshake.query.sessId);
    }

    if (tabsCount === -1 && agentsCount === -1) {
        DecreaseOnlineRooms();
        logger.debug(`room not found: ${socket.handshake.query.roomId}`);
        return;
    }
    if (tabsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no SESSIONS`);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_SESSIONS);
    }
    if (agentsCount === 0) {
        logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no AGENTS`);
        sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_AGENTS);
    }
}

async function onUpdateEvent(socket, ...args) {
    logger.debug(`${socket.id} sent update event.`);
    if (socket.handshake.query.identity !== IDENTITIES.session) {
        logger.debug('Ignoring update event.');
        return
    }

    args[0] = updateSessionData(socket, args[0])
    socket.handshake.query.sessionInfo = deepMerge(socket.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});

    // update session cache
    await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);

    // Update sessionInfo for all agents in the room
    const connected_sockets = await fetchSockets(socket.handshake.query.roomId);
    for (let item of connected_sockets) {
        if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
            item.handshake.query.sessionInfo = deepMerge(item.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
        } else if (item.handshake.query.identity === IDENTITIES.agent) {
            sendFrom(socket, item.id, EVENTS_DEFINITION.emit.UPDATE_EVENT, args[0]);
        }
    }
}

async function onWebrtcAgentHandler(socket, ...args) {
    if (socket.handshake.query.identity === IDENTITIES.agent) {
        const agentIdToConnect = args[0]?.data?.toAgentId;
        logger.debug(`${socket.id} sent webrtc event to agent:${agentIdToConnect}`);
        if (agentIdToConnect && socket.handshake.sessionData.AGENTS_CONNECTED.includes(agentIdToConnect)) {
            sendFrom(socket, agentIdToConnect, EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, args[0]);
        }
    }
}

async function onAny(socket, eventName, ...args) {
    if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
        logger.debug(`received event:${eventName}, should be handled by another listener, stopping onAny.`);
        return
    }
    args[0] = updateSessionData(socket, args[0])
    if (socket.handshake.query.identity === IDENTITIES.session) {
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to room:${socket.handshake.query.roomId}`);
        sendFrom(socket, socket.handshake.query.roomId, eventName, args[0]);
    } else {
        // Stats
        handleEvent(eventName, socket, args[0]);
        logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to session of room:${socket.handshake.query.roomId}`);
        let socketId = await findSessionSocketId(socket.handshake.query.roomId, args[0]?.meta?.tabId);
        if (socketId === null) {
            logger.debug(`session not found for:${socket.handshake.query.roomId}`);
            sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
        } else {
            logger.debug("message sent");
            sendTo(socket.id, eventName, socket.id, args[0]);
        }
    }
}

// Back compatibility (add top layer with meta information)
function updateSessionData(socket, sessionData) {
    if (sessionData?.meta === undefined && socket.handshake.query.identity === IDENTITIES.session) {
        sessionData = {meta: {tabId: socket.handshake.query.tabId, version: 1}, data: sessionData};
    }
    return sessionData
}

module.exports = {
    onConnect,
}
@@ -4,91 +4,163 @@ const {logger} = require('./logger');

let io;

const getServer = function () {
    return io;
}
const useRedis = process.env.redis === "true";
const useStickySessions = process.env.stickySessions === "true";
let inMemorySocketsCache = [];
let lastCacheUpdateTime = 0;
const CACHE_REFRESH_INTERVAL = parseInt(process.env.cacheRefreshInterval) || 5000;

let redisClient;
const useRedis = process.env.redis === "true";

if (useRedis) {
    const {createClient} = require("redis");
    const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
    redisClient = createClient({url: REDIS_URL});
    redisClient.on("error", (error) => logger.error(`Redis error : ${error}`));
    redisClient.on("error", (error) => logger.error(`Redis cache error : ${error}`));
    void redisClient.connect();
}

const processSocketsList = function (sockets) {
    let res = []
    for (let socket of sockets) {
        let {handshake} = socket;
        res.push({handshake});
const addSessionToCache = async function (sessionID, sessionData) {
    try {
        await redisClient.set(`active_sessions:${sessionID}`, JSON.stringify(sessionData), 'EX', 3600); // 60 minutes
        logger.debug(`Session ${sessionID} stored in Redis`);
    } catch (error) {
        logger.error(error);
    }
}

const getSessionFromCache = async function (sessionID) {
    try {
        const sessionData = await redisClient.get(`active_sessions:${sessionID}`);
        if (sessionData) {
            logger.debug(`Session ${sessionID} retrieved from Redis`);
            return JSON.parse(sessionData);
        }
        return null;
    } catch (error) {
        logger.error(error);
        return null;
    }
}

const removeSessionFromCache = async function (sessionID) {
    try {
        await redisClient.del(`active_sessions:${sessionID}`);
        logger.debug(`Session ${sessionID} removed from Redis`);
    } catch (error) {
        logger.error(error);
    }
    return res
}

const doFetchAllSockets = async function () {
    if (useRedis) {
        const now = Date.now();
        logger.info(`Using in-memory cache (age: ${now - lastCacheUpdateTime}ms)`);
        return inMemorySocketsCache;
    } else {
        try {
            let cachedResult = await redisClient.get('fetchSocketsResult');
            if (cachedResult) {
                return JSON.parse(cachedResult);
            }
            let result = await io.fetchSockets();
            let cachedString = JSON.stringify(processSocketsList(result));
            await redisClient.set('fetchSocketsResult', cachedString, {EX: 5});
            return result;
            return await io.fetchSockets();
        } catch (error) {
            logger.error('Error setting value with expiration:', error);
            logger.error('Error fetching sockets:', error);
            return [];
        }
    }
    return await io.fetchSockets();
}

const fetchSockets = async function (roomID) {
// Background refresher that runs independently of requests
let cacheRefresher = null;
function startCacheRefresher() {
    if (cacheRefresher) clearInterval(cacheRefresher);

    cacheRefresher = setInterval(async () => {
        const now = Date.now();
        // Only refresh if cache is stale
        if (now - lastCacheUpdateTime >= CACHE_REFRESH_INTERVAL) {
            logger.debug('Background refresh triggered');
            try {
                const startTime = performance.now();
                const result = await io.fetchSockets();
                inMemorySocketsCache = result;
                lastCacheUpdateTime = now;
                const duration = performance.now() - startTime;
                logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
            } catch (error) {
                logger.error(`Background refresh error: ${error}`);
            }
        }
    }, CACHE_REFRESH_INTERVAL / 2);
}

function sendFrom(from, to, eventName, ...data) {
    if (useStickySessions) {
        from.local.to(to).emit(eventName, ...data);
    } else {
        from.to(to).emit(eventName, ...data);
    }
}

function sendTo(to, eventName, ...data) {
    sendFrom(io, to, eventName, ...data);
}

const fetchSockets = async function (roomID, all = false) {
    if (!io) {
        return [];
    }
    if (!roomID) {
        return await doFetchAllSockets();
    }
    return await io.in(roomID).fetchSockets();
    try {
        if (useStickySessions && !all) {
            return await io.local.in(roomID).fetchSockets();
        } else {
            return await io.in(roomID).fetchSockets();
        }
    } catch (error) {
        logger.error('Error fetching sockets:', error);
        return [];
    }
}

const createSocketIOServer = function (server, prefix) {
    if (io) {
        return io;
    }

    // Common options for both initialization methods
    const options = {
        maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
        cors: {
            origin: "*",
            methods: ["GET", "POST", "PUT"],
            credentials: true
        },
        path: (prefix ? prefix : '') + '/socket',
        ...getCompressionConfig()
    };

    if (process.env.uws !== "true") {
        io = _io(server, {
            maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
            cors: {
                origin: "*",
                methods: ["GET", "POST", "PUT"],
                credentials: true
            },
            path: (prefix ? prefix : '') + '/socket',
            ...getCompressionConfig()
        });
        io = _io(server, options);
    } else {
        io = new _io.Server({
            maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
            cors: {
                origin: "*",
                methods: ["GET", "POST", "PUT"],
                credentials: true
            },
            path: (prefix ? prefix : '') + '/socket',
            ...getCompressionConfig()
        });
        io = new _io.Server(options);
        io.attachApp(server);
    }

    io.engine.on("headers", (headers) => {
        headers["x-host-id"] = process.env.HOSTNAME || "unknown";
    });

    if (useRedis) {
        startCacheRefresher();
    }
    return io;
}

module.exports = {
    createSocketIOServer,
    getServer,
    sendTo,
    sendFrom,
    fetchSockets,
    addSessionToCache,
    getSessionFromCache,
    removeSessionFromCache,
}
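The refresher above is a stale-while-revalidate cache: request handlers always read the in-memory snapshot immediately, and a background interval re-runs the expensive io.fetchSockets() at most once per CACHE_REFRESH_INTERVAL. The same pattern as a language-neutral Python sketch (names are illustrative, not part of the codebase):

import threading
import time

class SnapshotCache:
    # Serve a possibly-stale snapshot; refresh it in the background.
    def __init__(self, fetch, interval=5.0):
        self._fetch = fetch        # the expensive call, e.g. "fetch all sockets"
        self._interval = interval  # mirrors CACHE_REFRESH_INTERVAL (seconds here)
        self._snapshot = []
        self._last_update = 0.0

    def get(self):
        # Never blocks on the expensive fetch; may be up to `interval` stale.
        return self._snapshot

    def start(self):
        def loop():
            while True:
                if time.monotonic() - self._last_update >= self._interval:
                    self._snapshot = self._fetch()
                    self._last_update = time.monotonic()
                # Poll twice per interval, like the JS setInterval(..., INTERVAL / 2).
                time.sleep(self._interval / 2)
        threading.Thread(target=loop, daemon=True).start()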
@@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {

// For the ender service only
func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
    rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
    rows, err := s.db.Query(`
        SELECT
            session_id,
            CASE
                WHEN duration IS NULL OR duration < 0 THEN 0
                ELSE duration
            END,
            start_ts
        FROM sessions
        WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
    if err != nil {
        return nil, err
    }
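The rewritten query is a strict widening of the old COALESCE(duration, 0): NULL durations still become 0, and negative durations (previously passed through as-is) now also collapse to 0. The same normalization in Python, for comparison:

def normalized_duration(duration):
    # COALESCE(duration, 0) only mapped None -> 0; the CASE expression
    # additionally maps negative values -> 0.
    return 0 if duration is None or duration < 0 else duration

assert normalized_duration(None) == 0
assert normalized_duration(-5) == 0    # the old query would have returned -5
assert normalized_duration(42) == 42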
ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';


-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp    DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);
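ReplacingMergeTree(_timestamp) deduplicates rows sharing the ORDER BY key (project_id, event_name, property_name) during background merges, keeping the row with the latest _timestamp, so re-registering a property acts as an idempotent upsert. A hedged sketch of writing and reading the table with clickhouse-connect, the driver pinned in the Pipfile (connection details are placeholders):

import clickhouse_connect

client = clickhouse_connect.get_client(host="localhost")  # placeholder connection

# Re-inserting the same key later just supersedes the row on the next merge.
client.insert(
    "product_analytics.event_properties",
    [[1, "purchase", "price"], [1, "purchase", "currency"]],
    column_names=["project_id", "event_name", "property_name"],
)

# FINAL applies the ReplacingMergeTree dedup semantics at query time.
rows = client.query(
    "SELECT property_name FROM product_analytics.event_properties FINAL"
    " WHERE project_id = 1 AND event_name = 'purchase'"
).result_rows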
@@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
CREATE DATABASE IF NOT EXISTS experimental;

CREATE TABLE IF NOT EXISTS experimental.autocomplete

@@ -654,6 +654,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
      ORDER BY (project_id, event_name);


-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp    DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);

-- The full list of properties (events and users)
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
ee/scripts/schema/db/init_dbs/postgresql/1.23.0/1.23.0.sql (new file, 30 lines)
@@ -0,0 +1,30 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version()                       AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version'     AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec

--



COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif
@ -1,4 +1,4 @@
-\set or_version 'v1.22.0-ee'
+\set or_version 'v1.23.0-ee'
 SET client_min_messages TO NOTICE;
 \set ON_ERROR_STOP true
 SELECT EXISTS (SELECT 1

@ -0,0 +1,3 @@
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
+
+DROP TABLE IF EXISTS product_analytics.event_properties;
@ -0,0 +1,27 @@
+\set previous_version 'v1.23.0-ee'
+\set next_version 'v1.22.0-ee'
+SELECT openreplay_version() AS current_version,
+       openreplay_version() = :'previous_version' AS valid_previous,
+       openreplay_version() = :'next_version' AS is_next
+\gset
+
+\if :valid_previous
+\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
+BEGIN;
+SELECT format($fn_def$
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT '%1$s'
+$$ LANGUAGE sql IMMUTABLE;
+$fn_def$, :'next_version')
+\gexec
+
+
+COMMIT;
+
+\elif :is_next
+\echo new version detected :'next_version', nothing to do
+\else
+\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
+\endif
@ -82,7 +82,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
     { stream: MediaStream; isAgent: boolean }[] | null
   >([]);
   const [localStream, setLocalStream] = useState<LocalStream | null>(null);
-  const [callObject, setCallObject] = useState<{ end: () => void } | null>(
+  const [callObject, setCallObject] = useState<{ end: () => void } | null | undefined>(
     null,
   );

@ -135,6 +135,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
   }, [peerConnectionStatus]);

   const addIncomeStream = (stream: MediaStream, isAgent: boolean) => {
+    if (!stream.active) return;
     setIncomeStream((oldState) => {
       if (oldState === null) return [{ stream, isAgent }];
       if (

@ -149,13 +150,8 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
     });
   };

-  const removeIncomeStream = (stream: MediaStream) => {
-    setIncomeStream((prevState) => {
-      if (!prevState) return [];
-      return prevState.filter(
-        (existingStream) => existingStream.stream.id !== stream.id,
-      );
-    });
+  const removeIncomeStream = () => {
+    setIncomeStream([]);
   };

   function onReject() {

@ -181,7 +177,12 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
       () => {
         player.assistManager.ping(AssistActionsPing.call.end, agentId);
         lStream.stop.apply(lStream);
-        removeIncomeStream(lStream.stream);
+        removeIncomeStream();
+      },
+      () => {
+        player.assistManager.ping(AssistActionsPing.call.end, agentId);
+        lStream.stop.apply(lStream);
+        removeIncomeStream();
       },
       onReject,
       onError,
@ -34,43 +34,40 @@ function VideoContainer({
     }
     const iid = setInterval(() => {
       const track = stream.getVideoTracks()[0];
-      const settings = track?.getSettings();
-      const isDummyVideoTrack = settings
-        ? settings.width === 2 ||
-          settings.frameRate === 0 ||
-          (!settings.frameRate && !settings.width)
-        : true;
-      const shouldBeEnabled = track.enabled && !isDummyVideoTrack;
-
-      if (isEnabled !== shouldBeEnabled) {
-        setEnabled(shouldBeEnabled);
-        setRemoteEnabled?.(shouldBeEnabled);
+      if (track) {
+        if (!track.enabled) {
+          setEnabled(false);
+          setRemoteEnabled?.(false);
+        } else {
+          setEnabled(true);
+          setRemoteEnabled?.(true);
+        }
+      } else {
+        setEnabled(false);
+        setRemoteEnabled?.(false);
       }
     }, 500);
     return () => clearInterval(iid);
-  }, [stream, isEnabled]);
+  }, [stream]);

   return (
     <div
       className="flex-1"
       style={{
-        display: isEnabled ? undefined : 'none',
-        width: isEnabled ? undefined : '0px!important',
-        height: isEnabled ? undefined : '0px!important',
+        height: isEnabled ? undefined : '0px !important',
         border: '1px solid grey',
         transform: local ? 'scaleX(-1)' : undefined,
+        display: isEnabled ? 'block' : 'none',
       }}
     >
-      <video autoPlay ref={ref} muted={muted} style={{ height }} />
+      {isAgent ? (
+        <div
+          style={{
+            position: 'absolute',
+          }}
+        >
+          {t('Agent')}
+        </div>
+      ) : null}
+      <video
+        autoPlay
+        ref={ref}
+        muted={muted}
+        style={{ height }}
+      />
     </div>
   );
 }
@ -16,10 +16,10 @@ function ProfilerDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;

-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerProfiler from '@openreplay/tracker-profiler';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
 const fn = profiler('call_name')(() => {
   //...
 }, thisArg); // thisArg is optional`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
+
 import trackerProfiler from '@openreplay/tracker-profiler/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
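Every plugin doc below (Assist, GraphQL, MobX, NgRx, Pinia, Redux, Vuex, Zustand) repeats this same migration: the snippets stop constructing an instance with new OpenReplay({...}) and instead configure the shared singleton the package exports. A consolidated sketch of the new pattern, exactly as these snippets present it (plugin and key are placeholders):

import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';

// Configure the shared instance instead of `new OpenReplay({ ... })`.
tracker.configure({
  projectKey: 'PROJECT_KEY', // the docs interpolate the project's real key here
});

// Plugins attach exactly as before.
export const profiler = tracker.use(trackerProfiler());

tracker.start();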
@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';

 function AssistNpm(props) {
   const { t } = useTranslation();
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerAssist from '@openreplay/tracker-assist';
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${props.projectKey}',
 });
 tracker.start()

 tracker.use(trackerAssist(options)); // check the list of available options below`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerAssist from '@openreplay/tracker-assist/cjs';
-const tracker = new OpenReplay({
+
+tracker.configure({
   projectKey: '${props.projectKey}'
 });
 const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below
@ -14,19 +14,20 @@ function GraphQLDoc() {
   const projectKey = siteId
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerGraphQL from '@openreplay/tracker-graphql';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
 //...
 export const recordGraphQL = tracker.use(trackerGraphQL());`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@ -15,20 +15,21 @@ function MobxDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;

-  const mobxUsage = `import OpenReplay from '@openreplay/tracker';
+  const mobxUsage = `import { tracker } from '@openreplay/tracker';
 import trackerMobX from '@openreplay/tracker-mobx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below
 tracker.start();
 `;

-  const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerMobX from '@openreplay/tracker-mobx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below
@ -16,10 +16,10 @@ function NgRxDoc() {
     : sites[0]?.projectKey;
   const usage = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerNgRx from '@openreplay/tracker-ngrx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
 export class AppModule {}`;
 const usageCjs = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@ -17,10 +17,10 @@ function PiniaDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@ -16,10 +16,10 @@ function ReduxDoc() {
     : sites[0]?.projectKey;

   const usage = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerRedux from '@openreplay/tracker-redux';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@ -29,10 +29,11 @@ const store = createStore(
   applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
 );`;
 const usageCjs = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerRedux from '@openreplay/tracker-redux/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@ -16,10 +16,10 @@ function VueDoc() {
     : sites[0]?.projectKey;

   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@ -29,10 +29,11 @@ const store = new Vuex.Store({
   plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
 });`;
 const usageCjs = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerVuex from '@openreplay/tracker-vuex/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@ -16,11 +16,10 @@ function ZustandDoc(props) {
     : sites[0]?.projectKey;

   const usage = `import create from "zustand";
-import Tracker from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';


-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });

@ -43,11 +42,12 @@ const useBearStore = create(
 )
 `;
   const usageCjs = `import create from "zustand";
-import Tracker from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';


-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });
@ -11,6 +11,7 @@ import { useTranslation } from 'react-i18next';
 const initTableProps = [
   {
     title: <span className="font-medium">Series</span>,
+    _pureTitle: 'Series',
     dataIndex: 'seriesName',
     key: 'seriesName',
     sorter: (a, b) => a.seriesName.localeCompare(b.seriesName),

@ -18,6 +19,7 @@ const initTableProps = [
   },
   {
     title: <span className="font-medium">Avg.</span>,
+    _pureTitle: 'Avg.',
     dataIndex: 'average',
     key: 'average',
     sorter: (a, b) => a.average - b.average,

@ -94,6 +96,8 @@ function WidgetDatatable(props: Props) {
       tableCols.push({
         title: <span className="font-medium">{name}</span>,
         dataIndex: `${name}_${i}`,
+        // @ts-ignore
+        _pureTitle: name,
         key: `${name}_${i}`,
         sorter: (a, b) => a[`${name}_${i}`] - b[`${name}_${i}`],
       });
@ -1,52 +1,80 @@
-import React, { useEffect } from 'react';
+import React, { useEffect, useState } from 'react';
 import { observer } from 'mobx-react-lite';
 import { useStore } from 'App/mstore';
-import ReCAPTCHA from 'react-google-recaptcha';
 import { Form, Input, Loader, Icon, Message } from 'UI';
 import { Button } from 'antd';
 import { validatePassword } from 'App/validate';
 import { PASSWORD_POLICY } from 'App/constants';
 import stl from './forgotPassword.module.css';
 import { useTranslation } from 'react-i18next';
+import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

-const recaptchaRef = React.createRef();
 const ERROR_DONT_MATCH = (t) => t("Passwords don't match.");
-const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
-const { CAPTCHA_SITE_KEY } = window.env;

 interface Props {
   params: any;
 }
-function CreatePassword(props: Props) {
+
+function CreatePassword(props: Props & WithCaptchaProps) {
   const { t } = useTranslation();
   const { params } = props;
   const { userStore } = useStore();
   const { loading } = userStore;
   const { resetPassword } = userStore;
-  const [error, setError] = React.useState<string | null>(null);
-  const [validationError, setValidationError] = React.useState<string | null>(
-    null,
-  );
-  const [updated, setUpdated] = React.useState(false);
-  const [passwordRepeat, setPasswordRepeat] = React.useState('');
-  const [password, setPassword] = React.useState('');
+  const [error, setError] = useState<string | null>(null);
+  const [validationError, setValidationError] = useState<string | null>(null);
+  const [updated, setUpdated] = useState(false);
+  const [passwordRepeat, setPasswordRepeat] = useState('');
+  const [password, setPassword] = useState('');

   const pass = params.get('pass');
   const invitation = params.get('invitation');

-  const handleSubmit = () => {
+  const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;
+
+  const handleSubmit = (token?: string) => {
     if (!validatePassword(password)) {
       return;
     }
-    void resetPassword({ invitation, pass, password });
+
+    resetPassword({
+      invitation,
+      pass,
+      password,
+      'g-recaptcha-response': token
+    })
+      .then(() => {
+        setUpdated(true);
+      })
+      .catch((err) => {
+        setError(err.message);
+        // Reset captcha for the next attempt
+        resetCaptcha();
+      });
   };

-  const onSubmit = (e: any) => {
-    e.preventDefault();
-    if (CAPTCHA_ENABLED && recaptchaRef.current) {
-      recaptchaRef.current.execute();
-    } else if (!CAPTCHA_ENABLED) {
-      handleSubmit();
+  const onSubmit = () => {
+    // Validate before attempting captcha verification
+    if (!validatePassword(password) || password !== passwordRepeat) {
+      setValidationError(
+        password !== passwordRepeat
+          ? ERROR_DONT_MATCH(t)
+          : PASSWORD_POLICY(t)
+      );
+      return;
     }
+
+    // Reset any previous errors
+    setError(null);
+    setValidationError(null);
+
+    submitWithCaptcha({ pass, invitation, password })
+      .then((data) => {
+        handleSubmit(data['g-recaptcha-response']);
+      })
+      .catch((error) => {
+        console.error('Captcha verification failed:', error);
+        // The component will handle showing appropriate messages
+      });
   };

   const write = (e: any) => {

@ -63,7 +91,7 @@ function CreatePassword(props: Props) {
     } else {
       setValidationError(null);
     }
-  }, [passwordRepeat, password]);
+  }, [passwordRepeat, password, t]);

   return (
     <Form

@ -73,19 +101,8 @@ function CreatePassword(props: Props) {
     >
       {!error && (
         <>
-          <Loader loading={loading}>
+          <Loader loading={loading || isVerifyingCaptcha}>
             <div data-hidden={updated} className="w-full">
-              {CAPTCHA_ENABLED && (
-                <div className={stl.recaptcha}>
-                  <ReCAPTCHA
-                    ref={recaptchaRef}
-                    size="invisible"
-                    sitekey={CAPTCHA_SITE_KEY}
-                    onChange={(token: any) => handleSubmit(token)}
-                  />
-                </div>
-              )}
-
               <Form.Field>
                 <label>{t('New password')}</label>
                 <Input

@ -132,10 +149,15 @@ function CreatePassword(props: Props) {
             <Button
               htmlType="submit"
               type="primary"
-              loading={loading}
+              loading={loading || isVerifyingCaptcha}
+              disabled={loading || isVerifyingCaptcha || validationError !== null}
               className="w-full mt-4"
             >
-              {t('Create')}
+              {isVerifyingCaptcha
+                ? t('Verifying...')
+                : loading
+                  ? t('Processing...')
+                  : t('Create')}
             </Button>
           )}
         </>

@ -153,4 +175,4 @@ function CreatePassword(props: Props) {
   );
 }

-export default observer(CreatePassword);
+export default withCaptcha(observer(CreatePassword));
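Each of these forms now follows the same contract from the withCaptcha HOC introduced later in this diff: validate locally, let the HOC obtain a token, then forward g-recaptcha-response to the store action. A minimal sketch of a consumer, assuming the HOC's props as declared in withRecaptcha.tsx (saveToServer is a hypothetical stand-in for a store action such as userStore.resetPassword):

import React from 'react';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

// Hypothetical stand-in for a store action.
declare function saveToServer(data: Record<string, any>): Promise<void>;

function PasswordForm({ submitWithCaptcha, resetCaptcha, isVerifyingCaptcha }: WithCaptchaProps) {
  const onSubmit = () => {
    // submitWithCaptcha resolves with the original payload plus 'g-recaptcha-response'.
    submitWithCaptcha({ password: 'new-password' })
      .then((data) => saveToServer(data))
      .catch(() => resetCaptcha()); // clear the widget so the user can retry
  };

  return <button onClick={onSubmit} disabled={isVerifyingCaptcha}>Submit</button>;
}

export default withCaptcha(PasswordForm);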
@ -1,24 +1,26 @@
-import React from 'react';
+import React, { useState } from 'react';
 import { Loader, Icon } from 'UI';
-import ReCAPTCHA from 'react-google-recaptcha';
 import { observer } from 'mobx-react-lite';
 import { useStore } from 'App/mstore';
 import { Form, Input, Button, Typography } from 'antd';
 import { SquareArrowOutUpRight } from 'lucide-react';
 import { useTranslation } from 'react-i18next';
+import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

-function ResetPasswordRequest() {
+interface Props {
+}
+
+function ResetPasswordRequest(props: Props & WithCaptchaProps) {
   const { t } = useTranslation();
   const { userStore } = useStore();
   const { loading } = userStore;
   const { requestResetPassword } = userStore;
-  const recaptchaRef = React.createRef();
-  const [requested, setRequested] = React.useState(false);
-  const [email, setEmail] = React.useState('');
-  const [error, setError] = React.useState(null);
-  const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
-  const { CAPTCHA_SITE_KEY } = window.env;
-  const [smtpError, setSmtpError] = React.useState<boolean>(false);
+  const [requested, setRequested] = useState(false);
+  const [email, setEmail] = useState('');
+  const [error, setError] = useState(null);
+  const [smtpError, setSmtpError] = useState<boolean>(false);
+
+  const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;

   const write = (e: any) => {
     const { name, value } = e.target;

@ -26,22 +28,21 @@ function ResetPasswordRequest() {
   };

   const onSubmit = () => {
-    // e.preventDefault();
-    if (CAPTCHA_ENABLED && recaptchaRef.current) {
-      recaptchaRef.current.execute();
-    } else if (!CAPTCHA_ENABLED) {
-      handleSubmit();
+    // Validation check
+    if (!email || email.trim() === '') {
+      return;
     }
+
+    submitWithCaptcha({ email: email.trim() })
+      .then((data) => {
+        handleSubmit(data['g-recaptcha-response']);
+      })
+      .catch((error: any) => {
+        console.error('Captcha verification failed:', error);
+      });
   };

-  const handleSubmit = (token?: any) => {
-    if (
-      CAPTCHA_ENABLED &&
-      recaptchaRef.current &&
-      (token === null || token === undefined)
-    )
-      return;
-
+  const handleSubmit = (token?: string) => {
     setError(null);
     requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token })
       .catch((err: any) => {

@ -50,29 +51,21 @@ function ResetPasswordRequest() {
         }

         setError(err.message);
+        // Reset captcha for the next attempt
+        resetCaptcha();
       })
       .finally(() => {
         setRequested(true);
       });
   };

   return (
     <Form
       onFinish={onSubmit}
       style={{ minWidth: '50%' }}
       className="flex flex-col"
     >
-      <Loader loading={false}>
-        {CAPTCHA_ENABLED && (
-          <div className="flex justify-center">
-            <ReCAPTCHA
-              ref={recaptchaRef}
-              size="invisible"
-              data-hidden={requested}
-              sitekey={CAPTCHA_SITE_KEY}
-              onChange={(token: any) => handleSubmit(token)}
-            />
-          </div>
-        )}
+      <Loader loading={loading || isVerifyingCaptcha}>
         {!requested && (
           <>
             <Form.Item>

@ -92,10 +85,14 @@ function ResetPasswordRequest() {
             <Button
               type="primary"
               htmlType="submit"
-              loading={loading}
-              disabled={loading}
+              loading={loading || isVerifyingCaptcha}
+              disabled={loading || isVerifyingCaptcha}
             >
-              {t('Email Password Reset Link')}
+              {isVerifyingCaptcha
+                ? t('Verifying...')
+                : loading
+                  ? t('Processing...')
+                  : t('Email Password Reset Link')}
             </Button>
           </>
         )}

@ -146,4 +143,4 @@ function ResetPasswordRequest() {
   );
 }

-export default observer(ResetPasswordRequest);
+export default withCaptcha(observer(ResetPasswordRequest));
@ -1,23 +1,18 @@
 import withPageTitle from 'HOCs/withPageTitle';
 import cn from 'classnames';
-import React, { useEffect, useMemo, useRef, useState } from 'react';
-// Consider using a different approach for titles in functional components
-import ReCAPTCHA from 'react-google-recaptcha';
+import React, { useEffect, useState } from 'react';
 import { useHistory } from 'react-router-dom';
 import { observer } from 'mobx-react-lite';
 import { toast } from 'react-toastify';

-import { ENTERPRISE_REQUEIRED } from 'App/constants';
 import { forgotPassword, signup } from 'App/routes';
-import { Icon, Link, Loader, Tooltip } from 'UI';
+import { Icon, Link, Loader } from 'UI';
 import { Button, Form, Input } from 'antd';

 import Copyright from 'Shared/Copyright';

-import stl from './login.module.css';
 import { useTranslation } from 'react-i18next';
 import { useStore } from 'App/mstore';
 import LanguageSwitcher from '../LanguageSwitcher';
+import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
+import SSOLogin from './SSOLogin';

 const FORGOT_PASSWORD = forgotPassword();
 const SIGNUP_ROUTE = signup();

@ -26,14 +21,15 @@ interface LoginProps {
   location: Location;
 }

-const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
-
-function Login({ location }: LoginProps) {
+function Login({
+  location,
+  submitWithCaptcha,
+  isVerifyingCaptcha,
+  resetCaptcha,
+}: LoginProps & WithCaptchaProps) {
   const { t } = useTranslation();
   const [email, setEmail] = useState('');
   const [password, setPassword] = useState('');
-  // const CAPTCHA_ENABLED = useMemo(() => window.env.CAPTCHA_ENABLED === 'true', []);
-  const recaptchaRef = useRef<ReCAPTCHA>(null);
   const { loginStore, userStore } = useStore();
   const { errors } = userStore.loginRequest;
   const { loading } = loginStore;

@ -49,7 +45,6 @@ function Login({ location }: LoginProps) {
   }, [authDetails]);

   useEffect(() => {
-    // void fetchTenants();
     const jwt = params.get('jwt');
     const spotJwt = params.get('spotJwt');
     if (spotJwt) {

@ -108,32 +103,36 @@ function Login({ location }: LoginProps) {
         if (resp) {
           userStore.syntheticLogin(resp);
           setJwt({ jwt: resp.jwt, spotJwt: resp.spotJwt ?? null });
-          handleSpotLogin(resp.spotJwt);
+          if (resp.spotJwt) {
+            handleSpotLogin(resp.spotJwt);
+          }
         }
       })
       .catch((e) => {
         userStore.syntheticLoginError(e);
+        resetCaptcha();
       });
   };

   const onSubmit = () => {
-    if (CAPTCHA_ENABLED && recaptchaRef.current) {
-      recaptchaRef.current.execute();
-    } else if (!CAPTCHA_ENABLED) {
-      handleSubmit();
+    if (!email || !password) {
+      return;
     }
-  };
-
-  const ssoLink =
-    window !== window.top
-      ? `${window.location.origin}/api/sso/saml2?iFrame=true`
-      : `${window.location.origin}/api/sso/saml2`;
+
+    submitWithCaptcha({ email: email.trim(), password })
+      .then((data) => {
+        handleSubmit(data['g-recaptcha-response']);
+      })
+      .catch((error: any) => {
+        console.error('Captcha error:', error);
+      });
+  };

   return (
     <div className="flex items-center justify-center h-screen">
       <div className="flex flex-col items-center">
         <div className="m-10 ">
-          <img src="/assets/logo.svg" width={200} />
+          <img src="/assets/logo.svg" width={200} alt="Company Logo" />
         </div>
         <div className="border rounded-lg bg-white shadow-sm">
           <h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">

@ -145,15 +144,7 @@ function Login({ location }: LoginProps) {
             className={cn('flex items-center justify-center flex-col')}
             style={{ width: '350px' }}
           >
-            <Loader loading={loading}>
-              {CAPTCHA_ENABLED && (
-                <ReCAPTCHA
-                  ref={recaptchaRef}
-                  size="invisible"
-                  sitekey={window.env.CAPTCHA_SITE_KEY}
-                  onChange={(token) => handleSubmit(token)}
-                />
-              )}
+            <Loader loading={loading || isVerifyingCaptcha}>
               <div style={{ width: '350px' }} className="px-8">
                 <Form.Item>
                   <label>{t('Email Address')}</label>

@ -186,8 +177,8 @@ function Login({ location }: LoginProps) {
             </Loader>
             {errors && errors.length ? (
               <div className="px-8 my-2 w-full">
-                {errors.map((error) => (
-                  <div className="flex items-center bg-red-lightest rounded p-3">
+                {errors.map((error, index) => (
+                  <div key={index} className="flex items-center bg-red-lightest rounded p-3">
                     <Icon name="info" color="red" size="20" />
                     <span className="color-red ml-2">
                       {error}

@ -204,8 +195,14 @@ function Login({ location }: LoginProps) {
               className="mt-2 w-full text-center rounded-lg"
               type="primary"
               htmlType="submit"
+              loading={loading || isVerifyingCaptcha}
+              disabled={loading || isVerifyingCaptcha}
             >
-              {t('Login')}
+              {isVerifyingCaptcha
+                ? t('Verifying...')
+                : loading
+                  ? t('Logging in...')
+                  : t('Login')}
             </Button>

             <div className="my-8 flex justify-center items-center flex-wrap">

@ -219,63 +216,12 @@ function Login({ location }: LoginProps) {
             </div>
           </Form>

-          <div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
-            {authDetails.sso ? (
-              <a href={ssoLink} rel="noopener noreferrer">
-                <Button type="text" htmlType="submit">
-                  {`${t('Login with SSO')} ${
-                    authDetails.ssoProvider
-                      ? `(${authDetails.ssoProvider})`
-                      : ''
-                  }`}
-                </Button>
-              </a>
-            ) : (
-              <Tooltip
-                delay={0}
-                title={
-                  <div className="text-center">
-                    {authDetails.edition === 'ee' ? (
-                      <span>
-                        {t('SSO has not been configured.')}
-                        <br />
-                        {t('Please reach out to your admin.')}
-                      </span>
-                    ) : (
-                      ENTERPRISE_REQUEIRED(t)
-                    )}
-                  </div>
-                }
-                placement="top"
-              >
-                <Button
-                  type="text"
-                  htmlType="submit"
-                  className="pointer-events-none opacity-30"
-                >
-                  {`${t('Login with SSO')} ${
-                    authDetails.ssoProvider
-                      ? `(${authDetails.ssoProvider})`
-                      : ''
-                  }`}
-                </Button>
-              </Tooltip>
-            )}
-          </div>
+          <SSOLogin authDetails={authDetails} />
         </div>
-        <div
-          className={cn('flex items-center w-96 justify-center my-8', {
-            '!hidden': !authDetails?.enforceSSO,
-          })}
-        >
-          <a href={ssoLink} rel="noopener noreferrer">
-            <Button type="primary">
-              {`${t('Login with SSO')} ${
-                authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
-              }`}
-            </Button>
-          </a>
-        </div>
+
+        {authDetails?.enforceSSO && (
+          <SSOLogin authDetails={authDetails} enforceSSO={true} />
+        )}
       </div>
     </div>

@ -287,4 +233,6 @@ function Login({ location }: LoginProps) {
   );
 }

-export default withPageTitle('Login - OpenReplay')(observer(Login));
+export default withPageTitle('Login - OpenReplay')(
+  withCaptcha(observer(Login))
+);
78  frontend/app/components/Login/SSOLogin.tsx  (Normal file)

@ -0,0 +1,78 @@
+import React from 'react';
+import cn from 'classnames';
+import { Button, Tooltip } from 'antd';
+import { useTranslation } from 'react-i18next';
+import { ENTERPRISE_REQUEIRED } from 'App/constants';
+import stl from './login.module.css';
+import { useStore } from 'App/mstore';
+
+interface SSOLoginProps {
+  authDetails: any;
+  enforceSSO?: boolean;
+}
+
+const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
+  const { userStore } = useStore();
+  const { t } = useTranslation();
+  const { isEnterprise } = userStore;
+
+  const getSSOLink = () =>
+    window !== window.top
+      ? `${window.location.origin}/api/sso/saml2?iFrame=true`
+      : `${window.location.origin}/api/sso/saml2`;
+
+  const ssoLink = getSSOLink();
+  const ssoButtonText = `${t('Login with SSO')} ${authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
+    }`;
+
+  if (enforceSSO) {
+    return (
+      <div className={cn('flex items-center w-96 justify-center my-8')}>
+        <a href={ssoLink} rel="noopener noreferrer">
+          <Button type="primary">{ssoButtonText}</Button>
+        </a>
+      </div>
+    );
+  }
+
+  return (
+    <div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
+      {authDetails.sso ? (
+        <a href={ssoLink} rel="noopener noreferrer">
+          <Button type="text" htmlType="submit">
+            {ssoButtonText}
+          </Button>
+        </a>
+      ) : (
+        <Tooltip
+          title={
+            <div className="text-center">
+              {isEnterprise ? (
+                <span>
+                  {t('SSO has not been configured.')}
+                  <br />
+                  {t('Please reach out to your admin.')}
+                </span>
+              ) : (
+                ENTERPRISE_REQUEIRED(t)
+              )}
+            </div>
+          }
+          placement="top"
+        >
+          <span className="cursor-not-allowed">
+            <Button
+              type="text"
+              htmlType="submit"
+              disabled={true}
+            >
+              {ssoButtonText}
+            </Button>
+          </span>
+        </Tooltip>
+      )}
+    </div>
+  );
+};
+
+export default SSOLogin;
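Login consumes the extracted component in both modes; its two call sites, as the Login diff above shows, reduce to:

// Inline under the form: SSO link, or a disabled button with a tooltip when SSO is not configured.
<SSOLogin authDetails={authDetails} />

// Rendered additionally when the tenant enforces SSO.
{authDetails?.enforceSSO && <SSOLogin authDetails={authDetails} enforceSSO={true} />}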
@ -1,16 +1,14 @@
 import React from 'react';
 import { Redirect, Route, RouteComponentProps, Switch } from 'react-router';
 import { withRouter } from 'react-router-dom';

 import { OB_TABS, onboarding as onboardingRoute, withSiteId } from 'App/routes';
-import { Icon } from 'UI';

 import IdentifyUsersTab from './components/IdentifyUsersTab';
 import InstallOpenReplayTab from './components/InstallOpenReplayTab';
 import IntegrationsTab from './components/IntegrationsTab';
 import ManageUsersTab from './components/ManageUsersTab';
 import SideMenu from './components/SideMenu';
 import { useTranslation } from 'react-i18next';
+import { Smartphone, AppWindow } from 'lucide-react';

 interface Props {
   match: {

@ -33,7 +31,7 @@ function Onboarding(props: Props) {
     {
       label: (
         <div className="font-semibold flex gap-2 items-center">
-          <Icon name="browser/browser" size={16} />
+          <AppWindow size={16} />
           {t('Web')}
         </div>
       ),

@ -42,7 +40,7 @@ function Onboarding(props: Props) {
     {
       label: (
         <div className="font-semibold flex gap-2 items-center">
-          <Icon name="mobile" size={16} />
+          <Smartphone size={16} />
           {t('Mobile')}
         </div>
       ),
@ -130,18 +130,20 @@ function IdentifyUsersTab(props: Props) {
         'To identify users through metadata, you will have to explicitly specify your user metadata so it can be injected during sessions. Follow the below steps',
       )}
     </p>
-    <div className="flex items-start">
+    <div className="flex items-center gap-2 mb-2">
       <CircleNumber text="1" />
       <MetadataList />
     </div>

     <div className="my-6" />
     <div className="flex items-start">
-      <CircleNumber text="2" />
-      <div className="pt-1 w-full">
+      <div>
+        <CircleNumber text="2" />
+        <span className="font-bold">
+          {t('Inject metadata when recording sessions')}
+        </span>
+      </div>
+      <div className="pt-1 w-full">
         <div className="my-2">
           {t('Use the')}
           <span className="highlight-blue">setMetadata</span>{' '}
@ -55,16 +55,14 @@ function MetadataList() {
       <Button type="default" onClick={() => openModal()}>
         {t('Add Metadata')}
       </Button>
-      <div className="flex ml-2">
-        {fields.map((f, index) => (
-          <TagBadge
-            key={index}
-            text={f.key}
-            onRemove={() => removeMetadata(f)}
-            outline
-          />
-        ))}
-      </div>
+      {fields.map((f, index) => (
+        <TagBadge
+          key={index}
+          text={f.key}
+          onRemove={() => removeMetadata(f)}
+          outline
+        />
+      ))}
     </div>
   );
 }
@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
 import { useTranslation } from 'react-i18next';

 const installationCommand = 'npm i @openreplay/tracker';
-const usageCode = `import Tracker from '@openreplay/tracker';
+const usageCode = `import { tracker } from '@openreplay/tracker';

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });
 tracker.start()`;
-const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
+const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });
@ -19,7 +19,7 @@ const AUTOREFRESH_INTERVAL = 2 * 60 * 1000;
 const PER_PAGE = 10;

 function LiveSessionList() {
-  const { searchStoreLive, sessionStore, customFieldStore } = useStore();
+  const { searchStoreLive, sessionStore, customFieldStore, projectsStore } = useStore();
   const filter = searchStoreLive.instance;
   const list = sessionStore.liveSessions;
   const { totalLiveSessions } = sessionStore;

@ -72,6 +72,12 @@ function LiveSessionList() {
     void searchStoreLive.fetchSessions();
   };

+  useEffect(() => {
+    if (projectsStore.activeSiteId) {
+      void searchStoreLive.fetchSessions(true);
+    }
+  }, [projectsStore.activeSiteId])
+
   const onUserClick = (userId: string, userAnonymousId: string) => {
     if (userId) {
       searchStoreLive.addFilterByKeyAndValue(FilterKey.USERID, userId);

@ -98,7 +104,7 @@ function LiveSessionList() {
     <div>
       <div className="bg-white py-3 rounded-lg border shadow-sm">
         <div className="flex mb-4 pb-2 px-3 justify-between items-center border-b border-b-gray-lighter">
-          <LiveSessionReloadButton onClick={refetch} />
+          <LiveSessionReloadButton />
           <div className="flex items-center">
             <div className="flex items-center ml-6">
               <span className="mr-2 color-gray-medium">{t('Sort By')}</span>
@ -4,15 +4,11 @@ import { observer } from 'mobx-react-lite';
 import ReloadButton from '../ReloadButton';
 import { useTranslation } from 'react-i18next';

-interface Props {
-  onClick: () => void;
-}
-
-function LiveSessionReloadButton(props: Props) {
+function LiveSessionReloadButton() {
   const { t } = useTranslation();
-  const { sessionStore } = useStore();
-  const { onClick } = props;
-  const loading = sessionStore.loadingLiveSessions;
+  const { searchStoreLive } = useStore();
+  const onClick = searchStoreLive.fetchSessions
+  const loading = searchStoreLive.loading;
   return (
     <ReloadButton label={t('Refresh')} buttonSize={'small'} iconSize={14} loading={loading} onClick={onClick} className="cursor-pointer" />
   );
@ -18,6 +18,7 @@ export default function ReloadButton(props: Props) {
     <Button
       type="default"
       size={buttonSize}
+      loading={loading}
       onClick={onClick}
       icon={<SyncOutlined style={{ fontSize: iconSize }} />}
     >
@ -5,17 +5,18 @@ import stl from './installDocs.module.css';
 import { useTranslation } from 'react-i18next';

 const installationCommand = 'npm i @openreplay/tracker';
-const usageCode = `import Tracker from '@openreplay/tracker';
+const usageCode = `import { tracker } from '@openreplay/tracker';

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });

 tracker.start()`;
-const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
+const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });
@ -31,7 +31,7 @@ const Input = React.forwardRef((props: Props, ref: any) => {
   {icon && (
     <Icon
       name={icon}
-      className="absolute top-0 bottom-0 my-auto ml-4"
+      className="absolute top-0 bottom-0 my-auto ml-4 z-10"
       size="14"
     />
   )}
@ -28,18 +28,18 @@ export const checkValues = (key: any, value: any) => {
 };

 export const filterMap = ({
-  category,
-  value,
-  key,
-  operator,
-  sourceOperator,
-  source,
-  custom,
-  isEvent,
-  filters,
-  sort,
-  order
-}: any) => ({
+                            category,
+                            value,
+                            key,
+                            operator,
+                            sourceOperator,
+                            source,
+                            custom,
+                            isEvent,
+                            filters,
+                            sort,
+                            order
+                          }: any) => ({
   value: checkValues(key, value),
   custom,
   type: category === FilterCategory.METADATA ? FilterKey.METADATA : key,

@ -254,7 +254,7 @@ class SearchStore {

     this.savedSearch = new SavedSearch({});
     sessionStore.clearList();
-    void this.fetchSessions(true);
+    // void this.fetchSessions(true);
   }

   async checkForLatestSessionCount(): Promise<void> {
@ -75,6 +75,8 @@ class SearchStoreLive {

   loadingFilterSearch = false;

+  loading = false;
+
   constructor() {
     makeAutoObservable(this);

@ -242,11 +244,25 @@ class SearchStoreLive {
     });
   };

-  async fetchSessions() {
-    await sessionStore.fetchLiveSessions({
-      ...this.instance.toSearch(),
-      page: this.currentPage,
-    });
+  setLoading = (val: boolean) => {
+    this.loading = val;
+  }
+
+  fetchSessions = async (force?: boolean) => {
+    if (!force && this.loading) {
+      return;
+    }
+    this.setLoading(true)
+    try {
+      await sessionStore.fetchLiveSessions({
+        ...this.instance.toSearch(),
+        page: this.currentPage,
+      });
+    } catch (e) {
+      console.error('Error fetching sessions:', e);
+    } finally {
+      this.setLoading(false)
+    }
+  }
 }
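The new fetchSessions illustrates a common MobX pattern: an observable loading flag doubles as a re-entrancy guard, so overlapping refreshes are dropped unless explicitly forced. A standalone sketch of the same idea (store name and fetcher are illustrative, not from the diff):

import { makeAutoObservable, runInAction } from 'mobx';

class GuardedFetchStore<T> {
  loading = false;
  data: T | null = null;

  constructor(private fetcher: () => Promise<T>) {
    makeAutoObservable(this);
  }

  fetch = async (force?: boolean) => {
    if (!force && this.loading) return; // drop overlapping calls
    this.loading = true;
    try {
      const result = await this.fetcher();
      runInAction(() => { this.data = result; });
    } finally {
      runInAction(() => { this.loading = false; }); // always release the guard
    }
  };
}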
@ -114,7 +114,9 @@ class UserStore {
   get isEnterprise() {
     return (
       this.account?.edition === 'ee' ||
-      this.authStore.authDetails?.edition === 'ee'
+      this.account?.edition === 'msaas' ||
+      this.authStore.authDetails?.edition === 'ee' ||
+      this.authStore.authDetails?.edition === 'msaas'
     );
   }

@ -245,8 +247,8 @@ class UserStore {
     const errStr = err.errors[0]
       ? err.errors[0].includes('already exists')
         ? this.t(
-          "This email is already linked to an account or team on OpenReplay and can't be used again.",
-        )
+            "This email is already linked to an account or team on OpenReplay and can't be used again.",
+          )
         : err.errors[0]
       : this.t('Error saving user');
     toast.error(errStr);

@ -389,15 +391,16 @@ class UserStore {
         this.signUpRequest = { loading: false, errors: [] };
       });
     } catch (error) {
+      const inUse = error.message.includes('already in use');
+      const inUseMsg = this.t('An account with this email already exists. Please log in or use a different email address.')
+      const genericMsg = this.t('Error signing up; please check your data and try again')
       runInAction(() => {
         this.signUpRequest = {
           loading: false,
           errors: error.response?.errors || [],
         };
       });
-      toast.error(
-        this.t('Error signing up; please check your data and try again'),
-      );
+      toast.error(inUse ? inUseMsg : genericMsg);
     } finally {
       runInAction(() => {
         this.signUpRequest.loading = false;

@ -416,9 +419,9 @@ class UserStore {
         this.jwt = data.jwt;
         this.spotJwt = data.spotJwt;
       });
-    } catch (error) {
-      toast.error(this.t('Error resetting your password; please try again'));
-      return error.response;
+    } catch (e) {
+      toast.error(e.message || this.t('Error resetting your password; please try again'));
+      throw e;
     } finally {
       runInAction(() => {
         this.loading = false;

@ -663,14 +666,14 @@ class AuthStore {
     {
       key: 'authDetails',
       serialize: (ad) => {
-        delete ad.edition;
+        // delete ad.edition;
         return Object.keys(ad).length > 0
           ? JSON.stringify(ad)
           : JSON.stringify({});
       },
       deserialize: (json) => {
         const ad = JSON.parse(json);
-        delete ad.edition;
+        // delete ad.edition;
         return ad;
       },
     },
@ -150,10 +150,10 @@ export default class MessageLoader {
     });

     const sortedMsgs = msgs
-      // .sort((m1, m2) => m1.time - m2.time);
+      // .sort((m1, m2) => m1.time - m2.time)
       .sort(brokenDomSorter)
       .sort(sortIframes);


     if (brokenMessages > 0) {
       console.warn(
         'Broken timestamp messages',

@ -383,7 +383,6 @@ const DOMMessages = [
   MType.CreateElementNode,
   MType.CreateTextNode,
   MType.MoveNode,
-  MType.RemoveNode,
   MType.CreateIFrameDocument,
 ];

@ -395,6 +394,11 @@ function brokenDomSorter(m1: PlayerMsg, m2: PlayerMsg) {
   if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
     return 1;

+  if (m1.tp === MType.RemoveNode)
+    return 1;
+  if (m2.tp === MType.RemoveNode)
+    return -1;
+
   const m1IsDOM = DOMMessages.includes(m1.tp);
   const m2IsDOM = DOMMessages.includes(m2.tp);
   if (m1IsDOM && m2IsDOM) {
@ -190,6 +190,9 @@ export default class AssistManager {
       auth: {
         token: agentToken,
       },
+      extraHeaders: {
+        sessionId: this.session.sessionId,
+      },
       query: {
         peerId: this.peerID,
         projectId,
@ -185,8 +185,7 @@ export default class Call {
     pc.ontrack = (event) => {
       const stream = event.streams[0];
       if (stream && !this.videoStreams[remotePeerId]) {
-        const clonnedStream = stream.clone();
-        this.videoStreams[remotePeerId] = clonnedStream.getVideoTracks()[0];
+        this.videoStreams[remotePeerId] = stream.getVideoTracks()[0];
         if (this.store.get().calling !== CallingState.OnCall) {
           this.store.update({ calling: CallingState.OnCall });
         }

@ -305,22 +304,18 @@ export default class Call {
     }
     try {
-      // if the connection is not established yet, then set remoteDescription to peer
-      if (!pc.localDescription) {
-        await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
-        const answer = await pc.createAnswer();
-        await pc.setLocalDescription(answer);
-        if (isAgent) {
-          this.socket.emit('WEBRTC_AGENT_CALL', {
-            from: this.callID,
-            answer,
-            toAgentId: getSocketIdByCallId(fromCallId),
-            type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
-          });
-        } else {
-          this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
-        }
-      } else {
-        logger.warn('Skipping setRemoteDescription: Already in stable state');
-      }
+      await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
+      const answer = await pc.createAnswer();
+      await pc.setLocalDescription(answer);
+      if (isAgent) {
+        this.socket.emit('WEBRTC_AGENT_CALL', {
+          from: this.callID,
+          answer,
+          toAgentId: getSocketIdByCallId(fromCallId),
+          type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
+        });
+      } else {
+        this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
+      }
     } catch (e) {
       logger.error('Error setting remote description from answer', e);

@ -388,13 +383,13 @@ export default class Call {
   private handleCallEnd() {
     // If the call is not completed, then call onCallEnd
     if (this.store.get().calling !== CallingState.NoCall) {
-      this.callArgs && this.callArgs.onCallEnd();
+      this.callArgs && this.callArgs.onRemoteCallEnd();
     }
     // change state to NoCall
     this.store.update({ calling: CallingState.NoCall });
     // Close all created RTCPeerConnection
     Object.values(this.connections).forEach((pc) => pc.close());
-    this.callArgs?.onCallEnd();
+    this.callArgs?.onRemoteCallEnd();
     // Clear connections
     this.connections = {};
     this.callArgs = null;

@ -414,7 +409,7 @@ export default class Call {
       // Close all connections and reset callArgs
       Object.values(this.connections).forEach((pc) => pc.close());
       this.connections = {};
-      this.callArgs?.onCallEnd();
+      this.callArgs?.onRemoteCallEnd();
       this.store.update({ calling: CallingState.NoCall });
       this.callArgs = null;
     } else {

@ -443,7 +438,8 @@ export default class Call {
   private callArgs: {
     localStream: LocalStream;
     onStream: (s: MediaStream, isAgent: boolean) => void;
-    onCallEnd: () => void;
+    onRemoteCallEnd: () => void;
+    onLocalCallEnd: () => void;
     onReject: () => void;
     onError?: (arg?: any) => void;
   } | null = null;

@ -451,14 +447,16 @@ export default class Call {
   setCallArgs(
     localStream: LocalStream,
     onStream: (s: MediaStream, isAgent: boolean) => void,
-    onCallEnd: () => void,
+    onRemoteCallEnd: () => void,
+    onLocalCallEnd: () => void,
     onReject: () => void,
     onError?: (e?: any) => void,
   ) {
     this.callArgs = {
       localStream,
       onStream,
-      onCallEnd,
+      onRemoteCallEnd,
+      onLocalCallEnd,
       onReject,
       onError,
     };

@ -549,7 +547,7 @@ export default class Call {
       void this.initiateCallEnd();
       Object.values(this.connections).forEach((pc) => pc.close());
       this.connections = {};
-      this.callArgs?.onCallEnd();
+      this.callArgs?.onLocalCallEnd();
     }
   }
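The simplified answer path above is the standard WebRTC answerer handshake; stripped of the app-specific socket plumbing, it reduces to the sketch below (the sendAnswer callback stands in for the socket.emit calls, and its name is illustrative):

// Minimal answerer-side handshake for one RTCPeerConnection.
async function answerOffer(
  pc: RTCPeerConnection,
  offer: RTCSessionDescriptionInit,
  sendAnswer: (answer: RTCSessionDescriptionInit) => void, // e.g. socket.emit('webrtc_call_answer', ...)
) {
  await pc.setRemoteDescription(new RTCSessionDescription(offer)); // apply the caller's offer
  const answer = await pc.createAnswer();                          // generate the matching answer
  await pc.setLocalDescription(answer);                            // commit it locally
  sendAnswer(answer);                                              // relay back over signaling
}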
@ -138,26 +138,9 @@ export default class UserService {
   }

   async resetPassword(data: any) {
-    try {
-      const response = await this.client.post('/password/reset', data);
-      const responseData = await response.json();
-      if (responseData.errors) {
-        throw new Error(
-          responseData.errors[0] || 'An unexpected error occurred.',
-        );
-      }
-
-      return responseData || {};
-    } catch (error: any) {
-      if (error.response) {
-        const errorData = await error.response.json();
-        const errorMessage = errorData.errors
-          ? errorData.errors[0]
-          : 'An unexpected error occurred.';
-        throw new Error(errorMessage);
-      }
-      throw new Error('An unexpected error occurred.');
-    }
+    const response = await this.client.post('/password/reset', data);
+    const responseData = await response.json();
+    return responseData || {};
   }

   async requestResetPassword(data: any) {
@ -597,8 +597,7 @@ function saveAsFile(blob: Blob, filename: string) {
 }

 export function exportAntCsv(tableColumns, tableData, filename = 'table.csv') {
-  console.log(tableColumns, tableData);
-  const headers = tableColumns.map((col) => col.title).join(',');
+  const headers = tableColumns.map((col) => col._pureTitle).join(',');
   const rows = tableData.map((row) =>
     tableColumns
       .map((col) => {
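The switch to _pureTitle matters because the antd column title here is a JSX element, and stringifying a React element for a CSV header yields "[object Object]". A minimal illustration of the two fields side by side (the column values are representative, taken from the WidgetDatatable diff above):

import React from 'react';

// Column shape used by the datatable: `title` renders in the UI,
// `_pureTitle` is the plain-string twin kept for CSV export.
const column = {
  title: <span className="font-medium">Avg.</span>, // React element; String(column.title) === '[object Object]'
  _pureTitle: 'Avg.',                               // safe to join(',') into a CSV header row
  dataIndex: 'average',
  key: 'average',
};

const csvHeader = [column].map((col) => col._pureTitle).join(',');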
|
|||
219
frontend/app/withRecaptcha.tsx
Normal file
219
frontend/app/withRecaptcha.tsx
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
import React, { useState, useRef, ComponentType, ReactNode, useCallback, useEffect, useLayoutEffect } from 'react';
import ReCAPTCHA from 'react-google-recaptcha';
import { toast } from "react-toastify";

// Define a more specific type for submission data
export interface SubmissionData {
  [key: string]: any;
}

export interface WithCaptchaProps {
  submitWithCaptcha: (data: SubmissionData) => Promise<any>;
  hasCaptchaError: boolean;
  isVerifyingCaptcha: boolean;
  resetCaptcha: () => void;
}

export interface WithCaptchaOptions {
  position?: 'visible' | 'hidden';
  errorMessage?: string;
  theme?: 'light' | 'dark';
  size?: 'normal' | 'compact' | 'invisible';
}

// Safely get environment variables with fallbacks
const getCaptchaConfig = () => {
  const enabled = typeof window !== 'undefined' &&
    window.env?.CAPTCHA_ENABLED === 'true';

  const siteKey = typeof window !== 'undefined' ?
    window.env?.CAPTCHA_SITE_KEY || '' : '';

  return { enabled, siteKey };
};

/**
 * Higher-Order Component that adds reCAPTCHA functionality to a form component
 *
 * @param WrappedComponent The component to wrap with CAPTCHA functionality
 * @param options Configuration options for the CAPTCHA behavior
 * @returns A new component with CAPTCHA capabilities
 */
const withCaptcha = <P extends object>(
  WrappedComponent: ComponentType<P & WithCaptchaProps>,
  options: WithCaptchaOptions = {}
): React.FC<P> => {
  // Default options
  const {
    position = 'hidden',
    errorMessage = 'Please complete the CAPTCHA verification',
    theme = 'light',
    size = 'invisible'
  } = options;

  const WithCaptchaComponent: React.FC<P> = (props: P) => {
    const { enabled: CAPTCHA_ENABLED, siteKey: CAPTCHA_SITE_KEY } = getCaptchaConfig();
    const [captchaToken, setCaptchaToken] = useState<string | null>(null);
    const [isVerifyingCaptcha, setIsVerifyingCaptcha] = useState<boolean>(false);
    const [tokenExpired, setTokenExpired] = useState<boolean>(false);
    const recaptchaRef = useRef<ReCAPTCHA>(null);

    // Reset token when expired
    useEffect(() => {
      if (tokenExpired) {
        setCaptchaToken(null);
        setTokenExpired(false);
      }
    }, [tokenExpired]);

    // Handle token expiration
    const onCaptchaExpired = useCallback(() => {
      setTokenExpired(true);
      if (CAPTCHA_ENABLED) {
        toast.warning('CAPTCHA verification expired. Please verify again.');
      }
    }, [CAPTCHA_ENABLED]);

    // Handle token change
    const onCaptchaChange = useCallback((token: string | null) => {
      console.log('Standard captcha callback received token:', !!token);
      setCaptchaToken(token);
      setTokenExpired(false);
    }, []);

    // Reset captcha manually
    const resetCaptcha = useCallback(() => {
      recaptchaRef.current?.reset();
      setCaptchaToken(null);
    }, []);

    // Submit with captcha verification
    const submitWithCaptcha = useCallback(
      (data: SubmissionData): Promise<any> => {
        return new Promise((resolve, reject) => {
          if (!CAPTCHA_ENABLED) {
            // CAPTCHA not enabled, resolve with original data
            resolve(data);
            return;
          }

          setIsVerifyingCaptcha(true);

          // Special handling for invisible reCAPTCHA
          if (size === 'invisible') {
            // Create a direct token handler function
            const handleToken = (receivedToken: string | null) => {
              console.log('reCAPTCHA token received:', !!receivedToken);

              if (receivedToken) {
                // We have a token, resolve the promise
                const dataWithCaptcha = {
                  ...data,
                  'g-recaptcha-response': receivedToken
                };

                resolve(dataWithCaptcha);

                // Reset for next use
                setTimeout(() => {
                  recaptchaRef.current?.reset();
                  setIsVerifyingCaptcha(false);
                }, 100);
              }
            };

            // Set up a callback directly on the reCAPTCHA ref
            if (recaptchaRef.current) {
              console.log('Executing invisible reCAPTCHA');

              // Execute the reCAPTCHA challenge
              recaptchaRef.current.executeAsync()
                .then((token: string | null) => {
                  handleToken(token);
                })
                .catch((error: any) => {
                  console.error('reCAPTCHA execution failed:', error);
                  setIsVerifyingCaptcha(false);
                  reject(new Error('CAPTCHA verification failed'));
                });

              // Set a timeout in case the promise doesn't resolve
              setTimeout(() => {
                if (isVerifyingCaptcha) {
                  console.log('reCAPTCHA verification timed out');
                  setIsVerifyingCaptcha(false);
                  toast.error(errorMessage || 'Verification timed out. Please try again.');
                  reject(new Error('CAPTCHA verification timeout'));
                }
              }, 5000);
            } else {
              console.error('reCAPTCHA ref not available');
              setIsVerifyingCaptcha(false);
              reject(new Error('CAPTCHA component not initialized'));
            }
          } else if (captchaToken) {
            // Standard reCAPTCHA with token already available
            const dataWithCaptcha = {
              ...data,
              'g-recaptcha-response': captchaToken
            };

            resolve(dataWithCaptcha);
            recaptchaRef.current?.reset();
            setCaptchaToken(null);
            setIsVerifyingCaptcha(false);
          } else {
            // Standard reCAPTCHA but no token yet
            toast.error(errorMessage || 'Please complete the CAPTCHA verification');
            reject(new Error('CAPTCHA verification required'));
            setIsVerifyingCaptcha(false);
          }
        });
      },
      [CAPTCHA_ENABLED, captchaToken, errorMessage, size, isVerifyingCaptcha]
    );

    const hasCaptchaError = !captchaToken && CAPTCHA_ENABLED === true;

    return (
      <>
        {CAPTCHA_ENABLED && (
          <div className={position === 'hidden' ? 'sr-only' : 'mb-4'}>
            <ReCAPTCHA
              ref={recaptchaRef}
              sitekey={CAPTCHA_SITE_KEY}
              onChange={onCaptchaChange}
              onExpired={onCaptchaExpired}
              theme={theme}
              size={size}
            />
            {hasCaptchaError && (
              <div className="text-red-500 text-sm mt-1">
                {errorMessage}
              </div>
            )}
          </div>
        )}
        <WrappedComponent
          {...props}
          submitWithCaptcha={submitWithCaptcha}
          hasCaptchaError={hasCaptchaError}
          isVerifyingCaptcha={isVerifyingCaptcha}
          resetCaptcha={resetCaptcha}
        />
      </>
    );
  };

  // Display name for debugging
  const wrappedComponentName =
    WrappedComponent.displayName ||
    WrappedComponent.name ||
    'Component';

  WithCaptchaComponent.displayName = `WithCaptcha(${wrappedComponentName})`;

  return WithCaptchaComponent;
};

export default withCaptcha;
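A minimal consumer sketch for the HOC above; `LoginForm`, the `/api/login` endpoint, and the import path are hypothetical:

// Hypothetical consumer: a login form wrapped with the CAPTCHA HOC.
import React from 'react';
import withCaptcha, { WithCaptchaProps, SubmissionData } from 'App/withRecaptcha'; // import path is an assumption

const LoginForm: React.FC<WithCaptchaProps> = ({ submitWithCaptcha, isVerifyingCaptcha }) => {
  const onSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    // submitWithCaptcha resolves with the form data plus 'g-recaptcha-response'.
    const payload: SubmissionData = { email: 'user@example.com' };
    try {
      const dataWithToken = await submitWithCaptcha(payload);
      await fetch('/api/login', { method: 'POST', body: JSON.stringify(dataWithToken) }); // endpoint is illustrative
    } catch {
      // Rejected when the CAPTCHA is missing, times out, or fails.
    }
  };

  return (
    <form onSubmit={onSubmit}>
      <button type="submit" disabled={isVerifyingCaptcha}>Sign in</button>
    </form>
  );
};

export default withCaptcha(LoginForm, { size: 'invisible' });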
@@ -10,7 +10,27 @@ metadata:
    {{- include "assist.labels" . | nindent 4 }}
  annotations:
    nginx.ingress.kubernetes.io/rewrite-target: /$1
    nginx.ingress.kubernetes.io/upstream-hash-by: $http_x_forwarded_for
    nginx.ingress.kubernetes.io/configuration-snippet: |
      #set $sticky_used "no";
      #if ($sessionid != "") {
      #  set $sticky_used "yes";
      #}

      #add_header X-Debug-Session-ID $sessionid;
      #add_header X-Debug-Session-Type "wss";
      #add_header X-Sticky-Session-Used $sticky_used;
      #add_header X-Upstream-Server $upstream_addr;

      proxy_hide_header access-control-allow-headers;
      proxy_hide_header Access-Control-Allow-Origin;
      add_header 'Access-Control-Allow-Origin' $http_origin always;
      add_header 'Access-Control-Allow-Methods' 'GET, OPTIONS' always;
      add_header 'Access-Control-Allow-Headers' 'sessionid, Content-Type, Authorization' always;
      add_header 'Access-Control-Max-Age' 1728000;
      add_header 'Content-Type' 'text/plain charset=UTF-8';

    nginx.ingress.kubernetes.io/upstream-hash-by: $sessionid

  {{- with .Values.ingress.annotations }}
  {{- toYaml . | nindent 4 }}
  {{- end }}
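For reference, a sketch of the kind of browser request this snippet is written for: the custom `sessionid` header makes the call non-simple, so the browser sends an OPTIONS preflight that these `Access-Control-*` headers must answer; the URL and values are illustrative:

// Hypothetical client call that triggers the CORS preflight handled above.
await fetch('https://assist.example.com/ws', {
  method: 'GET',
  headers: {
    sessionid: '1234567890',          // allowed by Access-Control-Allow-Headers
    Authorization: 'Bearer token',    // also on the allow-list
  },
});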
@@ -70,6 +70,19 @@ ingress:
  enabled: true
  className: "{{ .Values.global.ingress.controller.ingressClassResource.name }}"
  annotations:
    nginx.ingress.kubernetes.io/configuration-snippet: |
      add_header X-Debug-Session-ID $http_sessionid;
      add_header X-Debug-Session-Type "wss";

      # CORS configuration
      # We don't need the upstream header
      proxy_hide_header Access-Control-Allow-Origin;
      add_header 'Access-Control-Allow-Origin' $http_origin always;
      add_header 'Access-Control-Allow-Methods' 'GET, OPTIONS' always;
      add_header 'Access-Control-Allow-Headers' 'sessionid, Content-Type, Authorization' always;
      add_header 'Access-Control-Max-Age' 1728000;
      add_header 'Content-Type' 'text/plain charset=UTF-8';

    nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
    nginx.ingress.kubernetes.io/proxy-send-timeout: "3600"
    # kubernetes.io/ingress.class: nginx
@@ -18,4 +18,4 @@ version: 0.1.10
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.22.0"
AppVersion: "v1.22.1"
@@ -17,6 +17,13 @@ redis: &redis
ingress-nginx:
  enabled: true
  controller:
    config:
      http-snippet: |-
        # Extract sessionid from peerId, it'll be used for sticky session.
        map $arg_peerId $sessionid {
          default "";
          "~.*-(\d+)(?:-.*|$)" $1;
        }
    admissionWebhooks:
      patch:
        podAnnotations:
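The map above derives `$sessionid` from the `peerId` query argument for upstream hashing; a TypeScript sketch of the same regex, assuming peer IDs embed the numeric session id between dashes (the exact peerId format is an assumption here):

// Mirrors the nginx map: extract the digits used as the sticky-session key.
const sessionIdFromPeerId = (peerId: string): string => {
  const match = peerId.match(/.*-(\d+)(?:-.*|$)/);
  return match ? match[1] : ''; // default "" as in the nginx map
};

console.log(sessionIdFromPeerId('3sWXSsqHgSKnE87YkNJK-7263266182635118-tab1')); // "7263266182635118"
console.log(sessionIdFromPeerId('no-session-here'));                            // "" (no digit group)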
24 scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql (Normal file)

@@ -0,0 +1,24 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0';


CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
    project_id UInt16,
    user_id    UInt32,
    session_id UInt64,
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, user_id, session_id)
      TTL _timestamp + INTERVAL 3 MONTH;

-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp    DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);
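A sketch of reading and writing these tables from Node with the `@clickhouse/client` package; the connection URL and the sample values are assumptions:

import { createClient } from '@clickhouse/client';

const ch = createClient({ url: 'http://localhost:8123' }); // connection is an assumption

// Record that a user viewed a session; ReplacingMergeTree(_timestamp) dedupes repeats.
await ch.insert({
  table: 'experimental.user_viewed_sessions',
  values: [{ project_id: 1, user_id: 42, session_id: 7263266182635118 }],
  format: 'JSONEachRow',
});

// List known properties for one event; FINAL forces dedup at read time.
const rs = await ch.query({
  query: `SELECT property_name
          FROM product_analytics.event_properties FINAL
          WHERE project_id = 1 AND event_name = 'click'`,
  format: 'JSONEachRow',
});
console.log(await rs.json());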
@@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0';
CREATE DATABASE IF NOT EXISTS experimental;

CREATE TABLE IF NOT EXISTS experimental.autocomplete
@@ -515,6 +515,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name);

-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp    DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);


-- The full list of properties (events and users)
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
@@ -532,3 +543,15 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_properties
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, property_name, is_event_property);


CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
    project_id UInt16,
    user_id    UInt32,
    session_id UInt64,
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, user_id, session_id)
      TTL _timestamp + INTERVAL 3 MONTH;
30 scripts/schema/db/init_dbs/postgresql/1.23.0/1.23.0.sql (Normal file)

@@ -0,0 +1,30 @@
\set previous_version 'v1.22.0'
\set next_version 'v1.23.0'
SELECT openreplay_version() AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version' AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec

--


COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif
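The `\gset`/`\if` block only applies the upgrade when the database reports the expected previous version; the same guard written against `node-postgres` as a sketch (the connection string is an assumption):

import { Client } from 'pg';

const client = new Client({ connectionString: 'postgres://localhost/openreplay' }); // assumed DSN
await client.connect();

const { rows } = await client.query('SELECT openreplay_version() AS v');
if (rows[0].v === 'v1.22.0') {
  // Safe to run the 1.23.0 upgrade script.
  console.log('valid previous version, upgrading');
} else if (rows[0].v === 'v1.23.0') {
  console.log('already on v1.23.0, nothing to do');
} else {
  console.warn(`unexpected version ${rows[0].v}, skipping upgrade`);
}
await client.end();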
@@ -1,4 +1,4 @@
\set or_version 'v1.22.0'
\set or_version 'v1.23.0'
SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1
@@ -0,0 +1,6 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';


DROP TABLE IF EXISTS experimental.user_viewed_sessions;

DROP TABLE IF EXISTS product_analytics.event_properties;
27 scripts/schema/db/rollback_dbs/postgresql/1.23.0/1.23.0.sql (Normal file)

@@ -0,0 +1,27 @@
\set previous_version 'v1.23.0'
\set next_version 'v1.22.0'
SELECT openreplay_version() AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version' AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec


COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif
@@ -42,7 +42,7 @@ up to date with every new library you use.
| elasticsearch-py | Apache2 | Python |
| jira | BSD2 | Python |
| redis-py | MIT | Python |
| clickhouse-driver | MIT | Python |
| clickhouse-connect | Apache2 | Python |
| python3-saml | MIT | Python |
| kubernetes | Apache2 | Python |
| chalice | Apache2 | Python |
Binary file not shown.
Some files were not shown because too many files have changed in this diff.