Compare commits
57 commits
main
...
update_bat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7894d10509 | ||
|
|
9c6f587b98 | ||
|
|
300a857a5c | ||
|
|
eba22e0efa | ||
|
|
664f6b9014 | ||
|
|
5bbd7cff10 | ||
|
|
6f172d4f01 | ||
|
|
829e1c8bde | ||
|
|
e7d309dadf | ||
|
|
4bac12308a | ||
|
|
2aba1d9a52 | ||
|
|
1f4e32e4f2 | ||
|
|
49f98967d6 | ||
|
|
356fa02094 | ||
|
|
a8e47e59ad | ||
|
|
22ee13e641 | ||
|
|
c760d29fb4 | ||
|
|
d77a518cf0 | ||
|
|
f595a5932a | ||
|
|
e04c2aa251 | ||
|
|
e6eb41536d | ||
|
|
4b3ad60565 | ||
|
|
90669b0604 | ||
|
|
f4bf1b8960 | ||
|
|
70423c6d8e | ||
|
|
ae313c17d4 | ||
|
|
0e45fa53ad | ||
|
|
de2f87270a | ||
|
|
fe20f83130 | ||
|
|
d04e6686ca | ||
|
|
6adb45e15f | ||
|
|
a1337faeee | ||
|
|
7e065ab02f | ||
|
|
1e2dde09b4 | ||
|
|
3cdfe76134 | ||
|
|
39855651d5 | ||
|
|
dd469d2349 | ||
|
|
3d448320bf | ||
|
|
7b0771a581 | ||
|
|
988b396223 | ||
|
|
fa3b585785 | ||
|
|
91e0ebeb56 | ||
|
|
8e68eb9a20 | ||
|
|
13bd3d9121 | ||
|
|
048ae0913c | ||
|
|
73fff8b817 | ||
|
|
605fa96a34 | ||
|
|
2cb33d7894 | ||
|
|
15d427418d | ||
|
|
ed3e553726 | ||
|
|
7eace68de6 | ||
|
|
8009882cef | ||
|
|
7365d8639c | ||
|
|
4c967d4bc1 | ||
|
|
3fdf799bd7 | ||
|
|
9aca716e6b | ||
|
|
cf9ecdc9a4 |
137 changed files with 2532 additions and 2014 deletions
11
api/Pipfile
11
api/Pipfile
|
|
@ -6,16 +6,15 @@ name = "pypi"
|
|||
[packages]
|
||||
urllib3 = "==2.3.0"
|
||||
requests = "==2.32.3"
|
||||
boto3 = "==1.36.12"
|
||||
boto3 = "==1.37.16"
|
||||
pyjwt = "==2.10.1"
|
||||
psycopg2-binary = "==2.9.10"
|
||||
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
|
||||
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
|
||||
psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
|
||||
clickhouse-connect = "==0.8.15"
|
||||
elasticsearch = "==8.17.1"
|
||||
elasticsearch = "==8.17.2"
|
||||
jira = "==3.8.0"
|
||||
cachetools = "==5.5.1"
|
||||
fastapi = "==0.115.8"
|
||||
cachetools = "==5.5.2"
|
||||
fastapi = "==0.115.11"
|
||||
uvicorn = {extras = ["standard"], version = "==0.34.0"}
|
||||
python-decouple = "==3.8"
|
||||
pydantic = {extras = ["email"], version = "==2.10.6"}
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ from chalicelib.utils import helper
|
|||
from chalicelib.utils import pg_client, ch_client
|
||||
from crons import core_crons, core_dynamic_crons
|
||||
from routers import core, core_dynamic
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
|
||||
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
|
||||
|
||||
loglevel = config("LOGLEVEL", default=logging.WARNING)
|
||||
print(f">Loglevel set to: {loglevel}")
|
||||
|
|
@ -129,6 +129,6 @@ app.include_router(spot.public_app)
|
|||
app.include_router(spot.app)
|
||||
app.include_router(spot.app_apikey)
|
||||
|
||||
app.include_router(product_anaytics.public_app)
|
||||
app.include_router(product_anaytics.app)
|
||||
app.include_router(product_anaytics.app_apikey)
|
||||
app.include_router(product_analytics.public_app, prefix="/pa")
|
||||
app.include_router(product_analytics.app, prefix="/pa")
|
||||
app.include_router(product_analytics.app_apikey, prefix="/pa")
|
||||
|
|
|
|||
|
|
@ -1,14 +0,0 @@
|
|||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
|
||||
def search_events(project_id: int, data: dict):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT *
|
||||
FROM taha.events
|
||||
WHERE project_id=%(project_id)s
|
||||
ORDER BY created_at;""",
|
||||
params={"project_id": project_id})
|
||||
x = ch_client.execute(r)
|
||||
|
||||
return x
|
||||
0
api/chalicelib/core/product_analytics/__init__.py
Normal file
0
api/chalicelib/core/product_analytics/__init__.py
Normal file
108
api/chalicelib/core/product_analytics/events.py
Normal file
108
api/chalicelib/core/product_analytics/events.py
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_events(project_id: int):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT event_name, display_name
|
||||
FROM product_analytics.all_events
|
||||
WHERE project_id=%(project_id)s
|
||||
ORDER BY display_name;""",
|
||||
parameters={"project_id": project_id})
|
||||
x = ch_client.execute(r)
|
||||
|
||||
return helper.list_to_camel_case(x)
|
||||
|
||||
|
||||
def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
|
||||
with ClickHouseClient() as ch_client:
|
||||
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
|
||||
"projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
|
||||
|
||||
constraints = ["project_id = %(projectId)s",
|
||||
"created_at >= toDateTime(%(startDate)s/1000)",
|
||||
"created_at <= toDateTime(%(endDate)s/1000)"]
|
||||
for i, f in enumerate(data.filters):
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
f_k = f"f_value{i}"
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
op = sh.get_sql_operator(f.operator)
|
||||
is_any = sh.isAny_opreator(f.operator)
|
||||
is_undefined = sh.isUndefined_operator(f.operator)
|
||||
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
|
||||
if f.is_predefined:
|
||||
column = f.name
|
||||
else:
|
||||
column = f"properties.{f.name}"
|
||||
|
||||
if is_any:
|
||||
condition = f"isNotNull({column})"
|
||||
elif is_undefined:
|
||||
condition = f"isNull({column})"
|
||||
else:
|
||||
condition = sh.multi_conditions(f"{column} {op} %({f_k})s", f.value, value_key=f_k)
|
||||
constraints.append(condition)
|
||||
|
||||
ev_constraints = []
|
||||
for i, e in enumerate(data.events):
|
||||
e_k = f"e_value{i}"
|
||||
full_args = {**full_args, e_k: e.event_name}
|
||||
condition = f"`$event_name` = %({e_k})s"
|
||||
sub_conditions = []
|
||||
if len(e.properties.filters) > 0:
|
||||
for j, f in enumerate(e.properties.filters):
|
||||
p_k = f"e_{i}_p_{j}"
|
||||
full_args = {**full_args, **sh.multi_values(f.value, value_key=p_k)}
|
||||
if f.is_predefined:
|
||||
sub_condition = f"{f.name} {op} %({p_k})s"
|
||||
else:
|
||||
sub_condition = f"properties.{f.name} {op} %({p_k})s"
|
||||
sub_conditions.append(sh.multi_conditions(sub_condition, f.value, value_key=p_k))
|
||||
if len(sub_conditions) > 0:
|
||||
condition += " AND ("
|
||||
for j, c in enumerate(sub_conditions):
|
||||
if j > 0:
|
||||
condition += " " + e.properties.operators[j - 1] + " " + c
|
||||
else:
|
||||
condition += c
|
||||
condition += ")"
|
||||
|
||||
ev_constraints.append(condition)
|
||||
|
||||
constraints.append("(" + " OR ".join(ev_constraints) + ")")
|
||||
query = ch_client.format(
|
||||
f"""SELECT COUNT(1) OVER () AS total,
|
||||
event_id,
|
||||
`$event_name`,
|
||||
created_at,
|
||||
`distinct_id`,
|
||||
`$browser`,
|
||||
`$import`,
|
||||
`$os`,
|
||||
`$country`,
|
||||
`$state`,
|
||||
`$city`,
|
||||
`$screen_height`,
|
||||
`$screen_width`,
|
||||
`$source`,
|
||||
`$user_id`,
|
||||
`$device`
|
||||
FROM product_analytics.events
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY created_at
|
||||
LIMIT %(limit)s OFFSET %(offset)s;""",
|
||||
parameters=full_args)
|
||||
rows = ch_client.execute(query)
|
||||
if len(rows) == 0:
|
||||
return {"total": 0, "rows": [], "src": 2}
|
||||
total = rows[0]["total"]
|
||||
for r in rows:
|
||||
r.pop("total")
|
||||
return {"total": total, "rows": rows, "src": 2}
|
||||
19
api/chalicelib/core/product_analytics/properties.py
Normal file
19
api/chalicelib/core/product_analytics/properties.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from chalicelib.utils import helper
|
||||
from chalicelib.utils.ch_client import ClickHouseClient
|
||||
|
||||
|
||||
def get_properties(project_id: int, event_name):
|
||||
with ClickHouseClient() as ch_client:
|
||||
r = ch_client.format(
|
||||
"""SELECT all_properties.property_name,
|
||||
all_properties.display_name
|
||||
FROM product_analytics.event_properties
|
||||
INNER JOIN product_analytics.all_properties USING (property_name)
|
||||
WHERE event_properties.project_id=%(project_id)s
|
||||
AND all_properties.project_id=%(project_id)s
|
||||
AND event_properties.event_name=%(event_name)s
|
||||
ORDER BY created_at;""",
|
||||
parameters={"project_id": project_id,"event_name": event_name})
|
||||
properties = ch_client.execute(r)
|
||||
|
||||
return helper.list_to_camel_case(properties)
|
||||
|
|
@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
|
|||
from . import sessions_pg
|
||||
from . import sessions_pg as sessions_legacy
|
||||
from . import sessions_ch
|
||||
from . import sessions_search_pg
|
||||
from . import sessions_search_pg as sessions_search_legacy
|
||||
|
||||
if config("EXP_METRICS", cast=bool, default=False):
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
logger.info(">>> Using experimental sessions search")
|
||||
from . import sessions_ch as sessions
|
||||
from . import sessions_search_ch as sessions_search
|
||||
else:
|
||||
from . import sessions_pg as sessions
|
||||
from . import sessions_search_pg as sessions_search
|
||||
|
||||
# if config("EXP_METRICS", cast=bool, default=False):
|
||||
# from . import sessions_ch as sessions
|
||||
# else:
|
||||
# from . import sessions_pg as sessions
|
||||
|
|
|
|||
|
|
@ -671,24 +671,36 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions.append({"type": event_where[-1]})
|
||||
if not is_any:
|
||||
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
||||
event_where.append(json_condition(
|
||||
"main",
|
||||
"$properties",
|
||||
"selector", op, event.value, e_k)
|
||||
# event_where.append(json_condition(
|
||||
# "main",
|
||||
# "$properties",
|
||||
# "selector", op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.`$properties`.selector {op} %({e_k})s",
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
if is_not:
|
||||
event_where.append(json_condition(
|
||||
"sub", "$properties", _column, op, event.value, e_k
|
||||
))
|
||||
# event_where.append(json_condition(
|
||||
# "sub", "$properties", _column, op, event.value, e_k
|
||||
# ))
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"sub.`$properties`.{_column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
# event_where.append(
|
||||
# json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
# )
|
||||
event_where.append(
|
||||
json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||
sh.multi_conditions(f"main.`$properties`.{_column} {op} %({e_k})s",
|
||||
event.value, value_key=e_k)
|
||||
)
|
||||
events_conditions[-1]["condition"] = event_where[-1]
|
||||
else:
|
||||
|
|
@ -870,12 +882,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions[-1]["condition"] = []
|
||||
if not is_any and event.value not in [None, "*", ""]:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
sh.multi_conditions(
|
||||
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||
event.value, value_key=e_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
|
||||
event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
|
||||
value_key=s_k))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
events_extra_join += f" AND {event_where[-1]}"
|
||||
|
||||
|
|
@ -1193,6 +1208,28 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
|
||||
else:
|
||||
continue
|
||||
if event.properties is not None and len(event.properties.filters) > 0:
|
||||
event_fiters = []
|
||||
for l, property in enumerate(event.properties.filters):
|
||||
a_k = f"{e_k}_att_{l}"
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(property.value, value_key=a_k)}
|
||||
op = sh.get_sql_operator(property.operator)
|
||||
condition = f"main.properties.{property.name} {op} %({a_k})s"
|
||||
if property.is_predefined:
|
||||
condition = f"main.{property.name} {op} %({a_k})s"
|
||||
event_where.append(
|
||||
sh.multi_conditions(condition, property.value, value_key=a_k)
|
||||
)
|
||||
event_fiters.append(event_where[-1])
|
||||
if len(event_fiters) > 0:
|
||||
events_conditions[-1]["condition"] += " AND ("
|
||||
for l, e_f in enumerate(event_fiters):
|
||||
if l > 0:
|
||||
events_conditions[-1]["condition"] += event.properties.operators[l - 1] + e_f
|
||||
else:
|
||||
events_conditions[-1]["condition"] += e_f
|
||||
events_conditions[-1]["condition"] += ")"
|
||||
if event_index == 0 or or_events:
|
||||
event_where += ss_constraints
|
||||
if is_not:
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import ast
|
||||
import logging
|
||||
from typing import List, Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata, projects
|
||||
|
|
@ -141,7 +141,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
) AS users_sessions;""",
|
||||
full_args)
|
||||
elif ids_only:
|
||||
main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
|
||||
main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
|
||||
{query_part}
|
||||
ORDER BY s.session_id desc
|
||||
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",
|
||||
|
|
@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
ORDER BY sort_key {data.order}
|
||||
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
|
||||
parameters=full_args)
|
||||
logging.debug("--------------------")
|
||||
logging.debug(main_query)
|
||||
logging.debug("--------------------")
|
||||
|
||||
try:
|
||||
logging.debug("--------------------")
|
||||
sessions_list = cur.execute(main_query)
|
||||
logging.debug("--------------------")
|
||||
except Exception as err:
|
||||
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(main_query)
|
||||
|
|
@ -122,6 +122,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
|
|||
sort = 'session_id'
|
||||
if data.sort is not None and data.sort != "session_id":
|
||||
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
|
||||
if data.sort == 'datetime':
|
||||
sort = 'start_ts'
|
||||
else:
|
||||
sort = helper.key_to_snake_case(data.sort)
|
||||
|
||||
meta_keys = metadata.get(project_id=project.project_id)
|
||||
|
|
@ -11,9 +11,3 @@ if smtp.has_smtp():
|
|||
logger.info("valid SMTP configuration found")
|
||||
else:
|
||||
logger.info("no SMTP configuration found or SMTP validation failed")
|
||||
|
||||
if config("EXP_CH_DRIVER", cast=bool, default=True):
|
||||
logging.info(">>> Using new CH driver")
|
||||
from . import ch_client_exp as ch_client
|
||||
else:
|
||||
from . import ch_client
|
||||
|
|
|
|||
|
|
@ -1,73 +1,185 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_driver
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
if kwargs.get("parameters"):
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
|
||||
else:
|
||||
logger.debug(
|
||||
str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
elif len(args) > 0:
|
||||
if config("LOCAL_DEV", cast=bool, default=False):
|
||||
logger.debug(args[0])
|
||||
else:
|
||||
logger.debug(str.encode(args[0]))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
self.__client = clickhouse_driver.Client(host=config("ch_host"),
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
user=config("ch_user", default="default"),
|
||||
password=config("ch_password", default=""),
|
||||
port=config("ch_port", cast=int),
|
||||
settings=settings,
|
||||
**extra_args) \
|
||||
if self.__client is None else self.__client
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def execute(self, query, parameters=None, **args):
|
||||
try:
|
||||
results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
|
||||
keys = tuple(x for x, y in results[1])
|
||||
return [dict(zip(keys, i)) for i in results[0]]
|
||||
except Exception as err:
|
||||
logger.error("--------- CH EXCEPTION -----------", exc_info=err)
|
||||
logger.error("--------- CH QUERY EXCEPTION -----------")
|
||||
logger.error(self.format(query=query, parameters=parameters)
|
||||
.replace('\n', '\\n')
|
||||
.replace(' ', ' ')
|
||||
.replace(' ', ' '))
|
||||
logger.error("--------------------")
|
||||
raise err
|
||||
|
||||
def insert(self, query, params=None, **args):
|
||||
return self.__client.execute(query=query, params=params, **args)
|
||||
|
||||
def client(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, parameters):
|
||||
if parameters is None:
|
||||
def format(self, query, parameters=None):
|
||||
if parameters:
|
||||
ctx = QueryContext(query=query, parameters=parameters)
|
||||
return ctx.final_query
|
||||
return query
|
||||
return self.__client.substitute_params(query, parameters, self.__client.connection.context)
|
||||
|
||||
def __exit__(self, *args):
|
||||
pass
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">CH_POOL:not defined")
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
|
||||
|
||||
async def terminate():
|
||||
pass
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
|
|
|
|||
|
|
@ -1,177 +0,0 @@
|
|||
import logging
|
||||
import threading
|
||||
import time
|
||||
from functools import wraps
|
||||
from queue import Queue, Empty
|
||||
|
||||
import clickhouse_connect
|
||||
from clickhouse_connect.driver.query import QueryContext
|
||||
from decouple import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CH_CONFIG = {"host": config("ch_host"),
|
||||
"user": config("ch_user", default="default"),
|
||||
"password": config("ch_password", default=""),
|
||||
"port": config("ch_port_http", cast=int),
|
||||
"client_name": config("APP_NAME", default="PY")}
|
||||
CH_CONFIG = dict(_CH_CONFIG)
|
||||
|
||||
settings = {}
|
||||
if config('ch_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
|
||||
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
|
||||
|
||||
if config('ch_receive_timeout', cast=int, default=-1) > 0:
|
||||
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
|
||||
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
|
||||
|
||||
extra_args = {}
|
||||
if config("CH_COMPRESSION", cast=bool, default=True):
|
||||
extra_args["compression"] = "lz4"
|
||||
|
||||
|
||||
def transform_result(self, original_function):
|
||||
@wraps(original_function)
|
||||
def wrapper(*args, **kwargs):
|
||||
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
|
||||
result = original_function(*args, **kwargs)
|
||||
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
|
||||
column_names = result.column_names
|
||||
result = result.result_rows
|
||||
result = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class ClickHouseConnectionPool:
|
||||
def __init__(self, min_size, max_size):
|
||||
self.min_size = min_size
|
||||
self.max_size = max_size
|
||||
self.pool = Queue()
|
||||
self.lock = threading.Lock()
|
||||
self.total_connections = 0
|
||||
|
||||
# Initialize the pool with min_size connections
|
||||
for _ in range(self.min_size):
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.pool.put(client)
|
||||
self.total_connections += 1
|
||||
|
||||
def get_connection(self):
|
||||
try:
|
||||
# Try to get a connection without blocking
|
||||
client = self.pool.get_nowait()
|
||||
return client
|
||||
except Empty:
|
||||
with self.lock:
|
||||
if self.total_connections < self.max_size:
|
||||
client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=config("ch_database", default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
self.total_connections += 1
|
||||
return client
|
||||
# If max_size reached, wait until a connection is available
|
||||
client = self.pool.get()
|
||||
return client
|
||||
|
||||
def release_connection(self, client):
|
||||
self.pool.put(client)
|
||||
|
||||
def close_all(self):
|
||||
with self.lock:
|
||||
while not self.pool.empty():
|
||||
client = self.pool.get()
|
||||
client.close()
|
||||
self.total_connections = 0
|
||||
|
||||
|
||||
CH_pool: ClickHouseConnectionPool = None
|
||||
|
||||
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
|
||||
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
|
||||
RETRY = 0
|
||||
|
||||
|
||||
def make_pool():
|
||||
if not config('CH_POOL', cast=bool, default=True):
|
||||
return
|
||||
global CH_pool
|
||||
global RETRY
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
try:
|
||||
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
|
||||
max_size=config("CH_MAXCONN", cast=int, default=8))
|
||||
if CH_pool is not None:
|
||||
logger.info("Connection pool created successfully for CH")
|
||||
except ConnectionError as error:
|
||||
logger.error("Error while connecting to CH", exc_info=error)
|
||||
if RETRY < RETRY_MAX:
|
||||
RETRY += 1
|
||||
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
|
||||
time.sleep(RETRY_INTERVAL)
|
||||
make_pool()
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
class ClickHouseClient:
|
||||
__client = None
|
||||
|
||||
def __init__(self, database=None):
|
||||
if self.__client is None:
|
||||
if database is not None or not config('CH_POOL', cast=bool, default=True):
|
||||
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
|
||||
database=database if database else config("ch_database",
|
||||
default="default"),
|
||||
settings=settings,
|
||||
**extra_args)
|
||||
|
||||
else:
|
||||
self.__client = CH_pool.get_connection()
|
||||
|
||||
self.__client.execute = transform_result(self, self.__client.query)
|
||||
self.__client.format = self.format
|
||||
|
||||
def __enter__(self):
|
||||
return self.__client
|
||||
|
||||
def format(self, query, *, parameters=None):
|
||||
if parameters is None:
|
||||
return query
|
||||
return query % {
|
||||
key: f"'{value}'" if isinstance(value, str) else value
|
||||
for key, value in parameters.items()
|
||||
}
|
||||
|
||||
def __exit__(self, *args):
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
CH_pool.release_connection(self.__client)
|
||||
else:
|
||||
self.__client.close()
|
||||
|
||||
|
||||
async def init():
|
||||
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
|
||||
if config('CH_POOL', cast=bool, default=True):
|
||||
make_pool()
|
||||
|
||||
|
||||
async def terminate():
|
||||
global CH_pool
|
||||
if CH_pool is not None:
|
||||
try:
|
||||
CH_pool.close_all()
|
||||
logger.info("Closed all connexions to CH")
|
||||
except Exception as error:
|
||||
logger.error("Error while closing all connexions to CH", exc_info=error)
|
||||
|
|
@ -75,3 +75,4 @@ EXP_AUTOCOMPLETE=true
|
|||
EXP_ALERTS=true
|
||||
EXP_ERRORS_SEARCH=true
|
||||
EXP_METRICS=true
|
||||
EXP_SESSIONS_SEARCH=true
|
||||
|
|
@ -1,591 +0,0 @@
|
|||
-- -- Original Q3
|
||||
-- WITH ranked_events AS (SELECT *
|
||||
-- FROM ranked_events_1736344377403),
|
||||
-- n1 AS (SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM ranked_events
|
||||
-- WHERE event_number_in_session = 1
|
||||
-- AND isNotNull(next_value)
|
||||
-- GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n2 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n1
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 2
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n3 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n2
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 3
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n4 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n3
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 4
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8),
|
||||
-- n5 AS (SELECT *
|
||||
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
|
||||
-- re.event_type AS event_type,
|
||||
-- re.e_value AS e_value,
|
||||
-- re.next_type AS next_type,
|
||||
-- re.next_value AS next_value,
|
||||
-- COUNT(1) AS sessions_count
|
||||
-- FROM n4
|
||||
-- INNER JOIN ranked_events AS re
|
||||
-- ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
|
||||
-- WHERE re.event_number_in_session = 5
|
||||
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
|
||||
-- re.next_value) AS sub_level
|
||||
-- ORDER BY sessions_count DESC
|
||||
-- LIMIT 8)
|
||||
-- SELECT *
|
||||
-- FROM (SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n1
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n2
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n3
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n4
|
||||
-- UNION ALL
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- next_type,
|
||||
-- next_value,
|
||||
-- sessions_count
|
||||
-- FROM n5) AS chart_steps
|
||||
-- ORDER BY event_number_in_session;
|
||||
|
||||
-- Q1
|
||||
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
|
||||
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
|
||||
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
|
||||
FROM experimental.events AS events
|
||||
WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
|
||||
AND events.project_id = toUInt16(65)
|
||||
AND events.datetime >= toDateTime(1735599600000 / 1000)
|
||||
AND events.datetime < toDateTime(1736290799999 / 1000)
|
||||
GROUP BY 1),
|
||||
pre_ranked_events AS (SELECT *
|
||||
FROM (SELECT session_id,
|
||||
event_type,
|
||||
datetime,
|
||||
url_path AS e_value,
|
||||
row_number() OVER (PARTITION BY session_id
|
||||
ORDER BY datetime ,
|
||||
message_id ) AS event_number_in_session
|
||||
FROM experimental.events AS events
|
||||
INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
|
||||
WHERE events.project_id = toUInt16(65)
|
||||
AND events.datetime >= toDateTime(1735599600000 / 1000)
|
||||
AND events.datetime < toDateTime(1736290799999 / 1000)
|
||||
AND (events.event_type = 'LOCATION')
|
||||
AND events.datetime >= initial_event.start_event_timestamp
|
||||
) AS full_ranked_events
|
||||
WHERE event_number_in_session <= 5)
|
||||
SELECT *
|
||||
FROM pre_ranked_events);
|
||||
;
|
||||
|
||||
SELECT *
|
||||
FROM pre_ranked_events_1736344377403
|
||||
WHERE event_number_in_session < 3;
|
||||
|
||||
|
||||
|
||||
-- ---------Q2-----------
|
||||
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
|
||||
DROP TABLE ranked_events_1736344377403;
|
||||
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
|
||||
(WITH pre_ranked_events AS (SELECT *
|
||||
FROM pre_ranked_events_1736344377403),
|
||||
start_points AS (SELECT DISTINCT session_id
|
||||
FROM pre_ranked_events
|
||||
WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
|
||||
AND pre_ranked_events.event_number_in_session = 1),
|
||||
ranked_events AS (SELECT pre_ranked_events.*,
|
||||
leadInFrame(e_value)
|
||||
OVER (PARTITION BY session_id ORDER BY datetime
|
||||
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
|
||||
leadInFrame(toNullable(event_type))
|
||||
OVER (PARTITION BY session_id ORDER BY datetime
|
||||
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
|
||||
FROM start_points
|
||||
INNER JOIN pre_ranked_events USING (session_id))
|
||||
SELECT *
|
||||
FROM ranked_events);
|
||||
|
||||
|
||||
-- ranked events
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 2
|
||||
-- AND e_value='/en/deployment/deploy-docker/'
|
||||
-- AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
|
||||
-- AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
|
||||
AND isNotNull(next_value)
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY event_number_in_session, sessions_count DESC;
|
||||
|
||||
|
||||
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 1
|
||||
GROUP BY event_number_in_session, event_type, e_value
|
||||
ORDER BY event_number_in_session, sessions_count DESC;
|
||||
|
||||
SELECT COUNT(1) AS sessions_count
|
||||
FROM ranked_events_1736344377403
|
||||
WHERE event_number_in_session = 2
|
||||
AND isNull(next_value)
|
||||
;
|
||||
|
||||
-- ---------Q3 MORE -----------
|
||||
WITH ranked_events AS (SELECT *
|
||||
FROM ranked_events_1736344377403),
|
||||
n1 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 1
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
n2 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 2
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
n3 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 3
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
drop_n AS (-- STEP 1
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
'DROP' AS next_type,
|
||||
NULL AS next_value,
|
||||
sessions_count
|
||||
FROM n1
|
||||
WHERE isNull(n1.next_type)
|
||||
UNION ALL
|
||||
-- STEP 2
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
'DROP' AS next_type,
|
||||
NULL AS next_value,
|
||||
sessions_count
|
||||
FROM n2
|
||||
WHERE isNull(n2.next_type)),
|
||||
-- TODO: make this as top_steps, where every step will go to next as top/others
|
||||
top_n1 AS (-- STEP 1
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count
|
||||
FROM n1
|
||||
WHERE isNotNull(next_type)
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT 3),
|
||||
top_n2 AS (-- STEP 2
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count
|
||||
FROM n2
|
||||
WHERE (event_type, e_value) IN (SELECT event_type,
|
||||
e_value
|
||||
FROM n2
|
||||
WHERE isNotNull(next_type)
|
||||
GROUP BY event_type, e_value
|
||||
ORDER BY SUM(sessions_count) DESC
|
||||
LIMIT 3)
|
||||
ORDER BY sessions_count DESC),
|
||||
top_n AS (SELECT *
|
||||
FROM top_n1
|
||||
UNION ALL
|
||||
SELECT *
|
||||
FROM top_n2),
|
||||
u_top_n AS (SELECT DISTINCT event_number_in_session,
|
||||
event_type,
|
||||
e_value
|
||||
FROM top_n),
|
||||
others_n AS (
|
||||
-- STEP 1
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count
|
||||
FROM n1
|
||||
WHERE isNotNull(next_type)
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT 1000000 OFFSET 3
|
||||
UNION ALL
|
||||
-- STEP 2
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
sessions_count
|
||||
FROM n2
|
||||
WHERE isNotNull(next_type)
|
||||
-- GROUP BY event_number_in_session, event_type, e_value
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT 1000000 OFFSET 3)
|
||||
SELECT *
|
||||
FROM (
|
||||
-- Top
|
||||
SELECT *
|
||||
FROM top_n
|
||||
-- UNION ALL
|
||||
-- -- Others
|
||||
-- SELECT event_number_in_session,
|
||||
-- event_type,
|
||||
-- e_value,
|
||||
-- 'OTHER' AS next_type,
|
||||
-- NULL AS next_value,
|
||||
-- SUM(sessions_count)
|
||||
-- FROM others_n
|
||||
-- GROUP BY event_number_in_session, event_type, e_value
|
||||
-- UNION ALL
|
||||
-- -- Top go to Drop
|
||||
-- SELECT drop_n.event_number_in_session,
|
||||
-- drop_n.event_type,
|
||||
-- drop_n.e_value,
|
||||
-- drop_n.next_type,
|
||||
-- drop_n.next_value,
|
||||
-- drop_n.sessions_count
|
||||
-- FROM drop_n
|
||||
-- INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
|
||||
-- AND drop_n.event_type = u_top_n.event_type
|
||||
-- AND drop_n.e_value = u_top_n.e_value)
|
||||
-- ORDER BY drop_n.event_number_in_session
|
||||
-- -- -- UNION ALL
|
||||
-- -- -- Top go to Others
|
||||
-- SELECT top_n.event_number_in_session,
|
||||
-- top_n.event_type,
|
||||
-- top_n.e_value,
|
||||
-- 'OTHER' AS next_type,
|
||||
-- NULL AS next_value,
|
||||
-- SUM(top_n.sessions_count) AS sessions_count
|
||||
-- FROM top_n
|
||||
-- LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
|
||||
-- AND top_n.next_type = others_n.event_type
|
||||
-- AND top_n.next_value = others_n.e_value)
|
||||
-- WHERE others_n.event_number_in_session IS NULL
|
||||
-- AND top_n.next_type IS NOT NULL
|
||||
-- GROUP BY event_number_in_session, event_type, e_value
|
||||
-- UNION ALL
|
||||
-- -- Others got to Top
|
||||
-- SELECT others_n.event_number_in_session,
|
||||
-- 'OTHER' AS event_type,
|
||||
-- NULL AS e_value,
|
||||
-- others_n.s_next_type AS next_type,
|
||||
-- others_n.s_next_value AS next_value,
|
||||
-- SUM(sessions_count) AS sessions_count
|
||||
-- FROM others_n
|
||||
-- INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
|
||||
-- others_n.s_next_type = top_n.event_type AND
|
||||
-- others_n.s_next_value = top_n.event_type)
|
||||
-- GROUP BY others_n.event_number_in_session, next_type, next_value
|
||||
-- UNION ALL
|
||||
-- -- TODO: find if this works or not
|
||||
-- -- Others got to Others
|
||||
-- SELECT others_n.event_number_in_session,
|
||||
-- 'OTHER' AS event_type,
|
||||
-- NULL AS e_value,
|
||||
-- 'OTHERS' AS next_type,
|
||||
-- NULL AS next_value,
|
||||
-- SUM(sessions_count) AS sessions_count
|
||||
-- FROM others_n
|
||||
-- LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
|
||||
-- AND others_n.s_next_type = u_top_n.event_type
|
||||
-- AND others_n.s_next_value = u_top_n.e_value)
|
||||
-- WHERE u_top_n.event_number_in_session IS NULL
|
||||
-- GROUP BY others_n.event_number_in_session
|
||||
)
|
||||
ORDER BY event_number_in_session;
|
||||
|
||||
|
||||
-- ---------Q3 TOP ON VALUE ONLY -----------
|
||||
WITH ranked_events AS (SELECT *
|
||||
FROM ranked_events_1736344377403),
|
||||
n1 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 1
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
n2 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 2
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
n3 AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
next_type,
|
||||
next_value,
|
||||
COUNT(1) AS sessions_count
|
||||
FROM ranked_events
|
||||
WHERE event_number_in_session = 3
|
||||
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
|
||||
ORDER BY sessions_count DESC),
|
||||
|
||||
drop_n AS (-- STEP 1
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
'DROP' AS next_type,
|
||||
NULL AS next_value,
|
||||
sessions_count
|
||||
FROM n1
|
||||
WHERE isNull(n1.next_type)
|
||||
UNION ALL
|
||||
-- STEP 2
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
'DROP' AS next_type,
|
||||
NULL AS next_value,
|
||||
sessions_count
|
||||
FROM n2
|
||||
WHERE isNull(n2.next_type)),
|
||||
top_n AS (SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
SUM(sessions_count) AS sessions_count
|
||||
FROM n1
|
||||
GROUP BY event_number_in_session, event_type, e_value
|
||||
LIMIT 1
|
||||
UNION ALL
|
||||
-- STEP 2
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
SUM(sessions_count) AS sessions_count
|
||||
FROM n2
|
||||
GROUP BY event_number_in_session, event_type, e_value
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT 3
|
||||
UNION ALL
|
||||
-- STEP 3
|
||||
SELECT event_number_in_session,
|
||||
event_type,
|
||||
e_value,
|
||||
SUM(sessions_count) AS sessions_count
|
||||
FROM n3
|
||||
GROUP BY event_number_in_session, event_type, e_value
|
||||
ORDER BY sessions_count DESC
|
||||
LIMIT 3),
|
||||
top_n_with_next AS (SELECT n1.*
|
||||
FROM n1
|
||||
UNION ALL
|
||||
SELECT n2.*
|
||||
FROM n2
|
||||
INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
|
||||
AND n2.event_type = top_n.event_type
|
||||
AND n2.e_value = top_n.e_value)),
|
||||
others_n AS (
|
||||
-- STEP 2
|
||||
SELECT n2.*
|
||||
FROM n2
|
||||
WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
|
||||
(SELECT event_number_in_session, event_type, e_value
|
||||
FROM top_n
|
||||
WHERE top_n.event_number_in_session = 2)
|
||||
UNION ALL
|
||||
-- STEP 3
|
||||
SELECT n3.*
|
||||
FROM n3
|
||||
WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
|
||||
(SELECT event_number_in_session, event_type, e_value
|
||||
FROM top_n
|
||||
WHERE top_n.event_number_in_session = 3))
|
||||
SELECT *
|
||||
FROM (
|
||||
-- SELECT sum(top_n_with_next.sessions_count)
|
||||
-- FROM top_n_with_next
|
||||
-- WHERE event_number_in_session = 1
|
||||
-- -- AND isNotNull(next_value)
|
||||
-- AND (next_type, next_value) IN
|
||||
-- (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
|
||||
-- -- SELECT * FROM others_n
|
||||
-- -- SELECT * FROM n2
|
||||
-- SELECT *
|
||||
-- FROM top_n
|
||||
-- );
|
||||
-- Top to Top: valid
|
||||
SELECT top_n_with_next.*
|
||||
FROM top_n_with_next
|
||||
INNER JOIN top_n
|
||||
ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
|
||||
AND top_n_with_next.next_type = top_n.event_type
|
||||
AND top_n_with_next.next_value = top_n.e_value)
|
||||
UNION ALL
|
||||
-- Top to Others: valid
|
||||
SELECT top_n_with_next.event_number_in_session,
|
||||
top_n_with_next.event_type,
|
||||
top_n_with_next.e_value,
|
||||
'OTHER' AS next_type,
|
||||
NULL AS next_value,
|
||||
SUM(top_n_with_next.sessions_count) AS sessions_count
|
||||
FROM top_n_with_next
|
||||
WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
|
||||
(SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
|
||||
GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
|
||||
UNION ALL
|
||||
-- Top go to Drop: valid
|
||||
SELECT drop_n.event_number_in_session,
|
||||
drop_n.event_type,
|
||||
drop_n.e_value,
|
||||
drop_n.next_type,
|
||||
drop_n.next_value,
|
||||
drop_n.sessions_count
|
||||
FROM drop_n
|
||||
INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
|
||||
AND drop_n.event_type = top_n.event_type
|
||||
AND drop_n.e_value = top_n.e_value)
|
||||
ORDER BY drop_n.event_number_in_session
|
||||
UNION ALL
|
||||
-- Others got to Drop: valid
|
||||
SELECT others_n.event_number_in_session,
|
||||
'OTHER' AS event_type,
|
||||
NULL AS e_value,
|
||||
'DROP' AS next_type,
|
||||
NULL AS next_value,
|
||||
SUM(others_n.sessions_count) AS sessions_count
|
||||
FROM others_n
|
||||
WHERE isNull(others_n.next_type)
|
||||
AND others_n.event_number_in_session < 3
|
||||
GROUP BY others_n.event_number_in_session, next_type, next_value
|
||||
UNION ALL
|
||||
-- Others got to Top:valid
|
||||
SELECT others_n.event_number_in_session,
|
||||
'OTHER' AS event_type,
|
||||
NULL AS e_value,
|
||||
others_n.next_type,
|
||||
others_n.next_value,
|
||||
SUM(others_n.sessions_count) AS sessions_count
|
||||
FROM others_n
|
||||
WHERE isNotNull(others_n.next_type)
|
||||
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
|
||||
(SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
|
||||
GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
|
||||
UNION ALL
|
||||
-- Others got to Others
|
||||
SELECT others_n.event_number_in_session,
|
||||
'OTHER' AS event_type,
|
||||
NULL AS e_value,
|
||||
'OTHERS' AS next_type,
|
||||
NULL AS next_value,
|
||||
SUM(sessions_count) AS sessions_count
|
||||
FROM others_n
|
||||
WHERE isNotNull(others_n.next_type)
|
||||
AND others_n.event_number_in_session < 3
|
||||
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
|
||||
(SELECT event_number_in_session, event_type, e_value FROM top_n)
|
||||
GROUP BY others_n.event_number_in_session)
|
||||
ORDER BY event_number_in_session, sessions_count
|
||||
DESC;
|
||||
|
||||
|
||||
|
|
@ -1,16 +1,15 @@
|
|||
urllib3==2.3.0
|
||||
requests==2.32.3
|
||||
boto3==1.36.12
|
||||
boto3==1.37.16
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
psycopg[pool,binary]==3.2.6
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
elasticsearch==8.17.2
|
||||
jira==3.8.0
|
||||
cachetools==5.5.1
|
||||
cachetools==5.5.2
|
||||
|
||||
fastapi==0.115.8
|
||||
fastapi==0.115.11
|
||||
uvicorn[standard]==0.34.0
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.10.6
|
||||
|
|
|
|||
|
|
@ -1,16 +1,15 @@
|
|||
urllib3==2.3.0
|
||||
requests==2.32.3
|
||||
boto3==1.36.12
|
||||
boto3==1.37.16
|
||||
pyjwt==2.10.1
|
||||
psycopg2-binary==2.9.10
|
||||
psycopg[pool,binary]==3.2.4
|
||||
clickhouse-driver[lz4]==0.2.9
|
||||
psycopg[pool,binary]==3.2.6
|
||||
clickhouse-connect==0.8.15
|
||||
elasticsearch==8.17.1
|
||||
elasticsearch==8.17.2
|
||||
jira==3.8.0
|
||||
cachetools==5.5.1
|
||||
cachetools==5.5.2
|
||||
|
||||
fastapi==0.115.8
|
||||
fastapi==0.115.11
|
||||
uvicorn[standard]==0.34.0
|
||||
python-decouple==3.8
|
||||
pydantic[email]==2.10.6
|
||||
|
|
|
|||
28
api/routers/subs/product_analytics.py
Normal file
28
api/routers/subs/product_analytics.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
import schemas
|
||||
from chalicelib.core.product_analytics import events, properties
|
||||
from fastapi import Depends
|
||||
from or_dependencies import OR_context
|
||||
from routers.base import get_routers
|
||||
from fastapi import Body, Depends, BackgroundTasks
|
||||
|
||||
public_app, app, app_apikey = get_routers()
|
||||
|
||||
|
||||
@app.get('/{projectId}/properties/search', tags=["product_analytics"])
|
||||
def get_event_properties(projectId: int, event_name: str = None,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
if not event_name or len(event_name) == 0:
|
||||
return {"data": []}
|
||||
return {"data": properties.get_properties(project_id=projectId, event_name=event_name)}
|
||||
|
||||
|
||||
@app.get('/{projectId}/events/names', tags=["product_analytics"])
|
||||
def get_all_events(projectId: int,
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": events.get_events(project_id=projectId)}
|
||||
|
||||
|
||||
@app.post('/{projectId}/events/search', tags=["product_analytics"])
|
||||
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return {"data": events.search_events(project_id=projectId, data=data)}
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
import schemas
|
||||
from chalicelib.core.metrics import product_anaytics2
|
||||
from fastapi import Depends
|
||||
from or_dependencies import OR_context
|
||||
from routers.base import get_routers
|
||||
|
||||
|
||||
public_app, app, app_apikey = get_routers()
|
||||
|
||||
|
||||
@app.post('/{projectId}/events/search', tags=["dashboard"])
|
||||
def search_events(projectId: int,
|
||||
# data: schemas.CreateDashboardSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return product_anaytics2.search_events(project_id=projectId, data={})
|
||||
|
|
@ -1,2 +1,3 @@
|
|||
from .schemas import *
|
||||
from .product_analytics import *
|
||||
from . import overrides as _overrides
|
||||
|
|
|
|||
19
api/schemas/product_analytics.py
Normal file
19
api/schemas/product_analytics.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from typing import Optional, List
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from .overrides import BaseModel
|
||||
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
|
||||
_PaginatedSchema, PropertyFilterSchema
|
||||
|
||||
|
||||
class EventSearchSchema(BaseModel):
|
||||
event_name: str = Field(...)
|
||||
properties: Optional[EventPropertiesSchema] = Field(default=None)
|
||||
|
||||
|
||||
class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
|
||||
events: List[EventSearchSchema] = Field(default_factory=list, description="operator between events is OR")
|
||||
filters: List[PropertyFilterSchema] = Field(default_factory=list, description="operator between filters is AND")
|
||||
sort: str = Field(default="startTs")
|
||||
order: SortOrderType = Field(default=SortOrderType.DESC)
|
||||
|
|
@@ -545,6 +545,70 @@ class RequestGraphqlFilterSchema(BaseModel):
        return values


class EventPredefinedPropertyType(str, Enum):
    TIME = "$time"
    SOURCE = "$source"
    DURATION_S = "$duration_s"
    DESCRIPTION = "description"
    AUTO_CAPTURED = "$auto_captured"
    SDK_EDITION = "$sdk_edition"
    SDK_VERSION = "$sdk_version"
    DEVICE_ID = "$device_id"
    OS = "$os"
    OS_VERSION = "$os_version"
    BROWSER = "$browser"
    BROWSER_VERSION = "$browser_version"
    DEVICE = "$device"
    SCREEN_HEIGHT = "$screen_height"
    SCREEN_WIDTH = "$screen_width"
    CURRENT_URL = "$current_url"
    INITIAL_REFERRER = "$initial_referrer"
    REFERRING_DOMAIN = "$referring_domain"
    REFERRER = "$referrer"
    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
    SEARCH_ENGINE = "$search_engine"
    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
    UTM_SOURCE = "utm_source"
    UTM_MEDIUM = "utm_medium"
    UTM_CAMPAIGN = "utm_campaign"
    COUNTRY = "$country"
    STATE = "$state"
    CITY = "$city"
    ISSUE_TYPE = "issue_type"
    TAGS = "$tags"
    IMPORT = "$import"


class PropertyFilterSchema(BaseModel):
    name: Union[EventPredefinedPropertyType, str] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)
    value: List[Union[int, str]] = Field(...)
    property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)

    @computed_field
    @property
    def is_predefined(self) -> bool:
        return EventPredefinedPropertyType.has_value(self.name)

    @model_validator(mode="after")
    def transform_name(self):
        if isinstance(self.name, Enum):
            self.name = self.name.value
        return self


class EventPropertiesSchema(BaseModel):
    operators: List[Literal["and", "or"]] = Field(...)
    filters: List[PropertyFilterSchema] = Field(...)

    @model_validator(mode="after")
    def event_filter_validator(self):
        assert len(self.filters) == 0 \
               or len(self.operators) == len(self.filters) - 1, \
            "Number of operators must match the number of filter-1"
        return self


class SessionSearchEventSchema2(BaseModel):
    is_event: Literal[True] = True
    value: List[Union[str, int]] = Field(...)
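Not part of the diff: the event_filter_validator above requires len(operators) == len(filters) - 1, i.e. one "and"/"or" connector between each pair of adjacent filters. A minimal sketch of data that satisfies it, with made-up property names, operator strings and values:

# Two filters joined by a single connector -> 2 filters, 1 operator.
example_properties = {
    "operators": ["and"],
    "filters": [
        {"name": "$browser", "operator": "is", "value": ["Chrome"]},
        {"name": "$country", "operator": "is", "value": ["DE"]},
    ],
}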
@@ -553,6 +617,7 @@ class SessionSearchEventSchema2(BaseModel):
    source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
    sourceOperator: Optional[MathOperator] = Field(default=None)
    filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
    properties: Optional[EventPropertiesSchema] = Field(default=None)

    _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
    _single_to_list_values = field_validator('value', mode='before')(single_to_list)
@@ -1529,3 +1594,30 @@ class TagCreate(TagUpdate):

class ScopeSchema(BaseModel):
    scope: int = Field(default=1, ge=1, le=2)


class SessionModel(BaseModel):
    duration: int
    errorsCount: int
    eventsCount: int
    favorite: bool = Field(default=False)
    issueScore: int
    issueTypes: List[IssueType] = Field(default=[])
    metadata: dict = Field(default={})
    pagesCount: int
    platform: str
    projectId: int
    sessionId: str
    startTs: int
    timezone: Optional[str]
    userAnonymousId: Optional[str]
    userBrowser: str
    userCity: str
    userCountry: str
    userDevice: Optional[str]
    userDeviceType: str
    userId: Optional[str]
    userOs: str
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)
@@ -27,9 +27,14 @@ const respond = function (req, res, data) {
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        if (!res.aborted) {
            res.cork(() => {
                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
            });
        } else {
            logger.debug("response aborted");
            return;
        }
    }
    const duration = performance.now() - req.startTs;
    IncreaseTotalRequests();
@@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req

    // Add tracker version to context
    r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
    if err := validateTrackerVersion(req.TrackerVersion); err != nil {
        e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
        return
    }

    // Handler's logic
    if req.ProjectKey == nil {

@@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
    // Add projectID to context
    r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))

    // Validate tracker version
    if err := validateTrackerVersion(req.TrackerVersion); err != nil {
        e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
        return
    }

    // Check if the project supports mobile sessions
    if !p.IsWeb() {
        e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)
9 ee/api/.gitignore vendored

@@ -223,11 +223,14 @@ Pipfile.lock
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
/chalicelib/core/sourcemaps
/chalicelib/core/tags.py
/chalicelib/core/product_analytics
/chalicelib/saml
/chalicelib/utils/__init__.py
/chalicelib/utils/args_transformer.py

@@ -290,3 +293,5 @@ Pipfile.lock
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_details.py
/chalicelib/utils/contextual_validators.py
/routers/subs/product_analytics.py
/schemas/product_analytics.py
@@ -6,23 +6,20 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.16"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.11"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20"
redis = "==5.2.1"
azure-storage-blob = "==12.24.1"
@@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
from crons import core_crons, ee_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers import ee
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
from routers.subs import v1_api_ee

if config("ENABLE_SSO", cast=bool, default=True):

@@ -150,9 +150,9 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)

app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/ap")
app.include_router(product_analytics.app, prefix="/ap")
app.include_router(product_analytics.app_apikey, prefix="/ap")

if config("ENABLE_SSO", cast=bool, default=True):
    app.include_router(saml.public_app)
@@ -1,17 +0,0 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search as sessions_search_legacy

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental sessions search")
    from . import sessions_ch as sessions
    from . import sessions_search_exp as sessions_search
else:
    from . import sessions_pg as sessions
    from . import sessions_search as sessions_search
@@ -44,12 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/core/product_analytics
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py

@@ -110,3 +113,5 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_details.py
rm -rf ./chalicelib/utils/contextual_validators.py
rm -rf ./routers/subs/product_analytics.py
rm -rf ./schemas/product_analytics.py
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
python-decouple==3.8
pydantic[email]==2.10.6
apscheduler==3.11.0
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.11
uvicorn[standard]==0.34.0
gunicorn==23.0.0
python-decouple==3.8
@@ -1,4 +1,5 @@
from .schemas import *
from .schemas_ee import *
from .assist_stats_schema import *
from .product_analytics import *
from . import overrides as _overrides
@@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator

from chalicelib.utils.TimeUTC import TimeUTC
from . import schemas
from .overrides import BaseModel, Enum, ORUnion
from .overrides import BaseModel, Enum
from .transformers_validators import remove_whitespace
@@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
        return values


class SessionModel(BaseModel):
    duration: int
    errorsCount: int
    eventsCount: int
    favorite: bool = Field(default=False)
    issueScore: int
    issueTypes: List[schemas.IssueType] = Field(default=[])
    metadata: dict = Field(default={})
    pagesCount: int
    platform: str
    projectId: int
    sessionId: str
    startTs: int
    timezone: Optional[str]
    userAnonymousId: Optional[str]
    userBrowser: str
    userCity: str
    userCountry: str
    userDevice: Optional[str]
    userDeviceType: str
    userId: Optional[str]
    userOs: str
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)


class AssistRecordUpdatePayloadSchema(BaseModel):
    name: str = Field(..., min_length=1)
    _transform_name = field_validator('name', mode="before")(remove_whitespace)
@@ -83,9 +83,11 @@ if (process.env.uws !== "true") {
    const uWrapper = function (fn) {
        return (res, req) => {
            res.id = 1;
            res.aborted = false;
            req.startTs = performance.now(); // track request's start timestamp
            req.method = req.getMethod();
            res.onAborted(() => {
                res.aborted = true;
                onAbortedOrFinishedResponse(res);
            });
            return fn(req, res);
@@ -3,20 +3,50 @@ const {getCompressionConfig} = require("./helper");
const {logger} = require('./logger');

let io;
const getServer = function () {return io;}

const getServer = function () {
    return io;
const useRedis = process.env.redis === "true";
let inMemorySocketsCache = [];
let lastCacheUpdateTime = 0;
const CACHE_REFRESH_INTERVAL = parseInt(process.env.cacheRefreshInterval) || 5000;

const doFetchAllSockets = async function () {
    if (useRedis) {
        const now = Date.now();
        logger.info(`Using in-memory cache (age: ${now - lastCacheUpdateTime}ms)`);
        return inMemorySocketsCache;
    } else {
        try {
            return await io.fetchSockets();
        } catch (error) {
            logger.error('Error fetching sockets:', error);
            return [];
        }
    }
}

let redisClient;
const useRedis = process.env.redis === "true";
// Background refresher that runs independently of requests
let cacheRefresher = null;
function startCacheRefresher() {
    if (cacheRefresher) clearInterval(cacheRefresher);

    if (useRedis) {
        const {createClient} = require("redis");
        const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
        redisClient = createClient({url: REDIS_URL});
        redisClient.on("error", (error) => logger.error(`Redis error : ${error}`));
        void redisClient.connect();
    cacheRefresher = setInterval(async () => {
        const now = Date.now();
        // Only refresh if cache is stale
        if (now - lastCacheUpdateTime >= CACHE_REFRESH_INTERVAL) {
            logger.debug('Background refresh triggered');
            try {
                const startTime = performance.now();
                const result = await io.fetchSockets();
                inMemorySocketsCache = result;
                lastCacheUpdateTime = now;
                const duration = performance.now() - startTime;
                logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
            } catch (error) {
                logger.error(`Background refresh error: ${error}`);
            }
        }
    }, CACHE_REFRESH_INTERVAL / 2);
}

const processSocketsList = function (sockets) {

@@ -28,24 +58,6 @@ const processSocketsList = function (sockets) {
    return res
}

const doFetchAllSockets = async function () {
    if (useRedis) {
        try {
            let cachedResult = await redisClient.get('fetchSocketsResult');
            if (cachedResult) {
                return JSON.parse(cachedResult);
            }
            let result = await io.fetchSockets();
            let cachedString = JSON.stringify(processSocketsList(result));
            await redisClient.set('fetchSocketsResult', cachedString, {EX: 5});
            return result;
        } catch (error) {
            logger.error('Error setting value with expiration:', error);
        }
    }
    return await io.fetchSockets();
}

const fetchSockets = async function (roomID) {
    if (!io) {
        return [];

@@ -84,6 +96,7 @@ const createSocketIOServer = function (server, prefix) {
    });
    io.attachApp(server);
}
startCacheRefresher();
return io;
}
@@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {

// For the ender service only
func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
    rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
    rows, err := s.db.Query(`
        SELECT
            session_id,
            CASE
                WHEN duration IS NULL OR duration < 0 THEN 0
                ELSE duration
            END,
            start_ts
        FROM sessions
        WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
    if err != nil {
        return nil, err
    }
13 ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql Normal file

@@ -0,0 +1,13 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';


-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);
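Not part of the diff: a rough sketch of how this mapping could be read with clickhouse-connect, which is pinned in the Pipfiles above. Host, credentials and the filter values are placeholders.

import clickhouse_connect  # clickhouse-connect==0.8.15 in this branch

client = clickhouse_connect.get_client(host="localhost")  # placeholder connection settings
result = client.query(
    "SELECT property_name FROM product_analytics.event_properties "
    "WHERE project_id = %(project_id)s AND event_name = %(event_name)s",
    parameters={"project_id": 1, "event_name": "CLICK"},  # made-up values
)
print(result.result_rows)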
@@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
CREATE DATABASE IF NOT EXISTS experimental;

CREATE TABLE IF NOT EXISTS experimental.autocomplete

@@ -654,6 +654,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
      ORDER BY (project_id, event_name);


-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,

    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name);

-- The full list of properties (events and users)
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
30 ee/scripts/schema/db/init_dbs/postgresql/1.23.0/1.23.0.sql Normal file

@@ -0,0 +1,30 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version()                       AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version'     AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec

--



COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif
@@ -1,4 +1,4 @@
\set or_version 'v1.22.0-ee'
\set or_version 'v1.23.0-ee'
SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1
@@ -0,0 +1,3 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';

DROP TABLE IF EXISTS product_analytics.event_properties;
@@ -0,0 +1,27 @@
\set previous_version 'v1.23.0-ee'
\set next_version 'v1.22.0-ee'
SELECT openreplay_version()                       AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version'     AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec


COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif
@@ -82,7 +82,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
{ stream: MediaStream; isAgent: boolean }[] | null
>([]);
const [localStream, setLocalStream] = useState<LocalStream | null>(null);
const [callObject, setCallObject] = useState<{ end: () => void } | null>(
const [callObject, setCallObject] = useState<{ end: () => void } | null | undefined>(
null,
);

@@ -135,6 +135,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
}, [peerConnectionStatus]);

const addIncomeStream = (stream: MediaStream, isAgent: boolean) => {
if (!stream.active) return;
setIncomeStream((oldState) => {
if (oldState === null) return [{ stream, isAgent }];
if (

@@ -149,13 +150,8 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
});
};

const removeIncomeStream = (stream: MediaStream) => {
setIncomeStream((prevState) => {
if (!prevState) return [];
return prevState.filter(
(existingStream) => existingStream.stream.id !== stream.id,
);
});
const removeIncomeStream = () => {
setIncomeStream([]);
};

function onReject() {

@@ -181,7 +177,12 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream(lStream.stream);
removeIncomeStream();
},
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream();
},
onReject,
onError,
@@ -34,43 +34,40 @@ function VideoContainer({
}
const iid = setInterval(() => {
const track = stream.getVideoTracks()[0];
const settings = track?.getSettings();
const isDummyVideoTrack = settings
? settings.width === 2 ||
settings.frameRate === 0 ||
(!settings.frameRate && !settings.width)
: true;
const shouldBeEnabled = track.enabled && !isDummyVideoTrack;

if (isEnabled !== shouldBeEnabled) {
setEnabled(shouldBeEnabled);
setRemoteEnabled?.(shouldBeEnabled);
if (track) {
if (!track.enabled) {
setEnabled(false);
setRemoteEnabled?.(false);
} else {
setEnabled(true);
setRemoteEnabled?.(true);
}
} else {
setEnabled(false);
setRemoteEnabled?.(false);
}
}, 500);
return () => clearInterval(iid);
}, [stream, isEnabled]);
}, [stream]);

return (
<div
className="flex-1"
style={{
display: isEnabled ? undefined : 'none',
width: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px !important',
border: '1px solid grey',
transform: local ? 'scaleX(-1)' : undefined,
display: isEnabled ? 'block' : 'none',
}}
>
<video autoPlay ref={ref} muted={muted} style={{ height }} />
{isAgent ? (
<div
style={{
position: 'absolute',
}}
>
{t('Agent')}
</div>
) : null}
<video
autoPlay
ref={ref}
muted={muted}
style={{ height }}
/>
</div>
);
}
@@ -16,10 +16,10 @@ function ProfilerDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;

const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

@@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
const fn = profiler('call_name')(() => {
//...
}, thisArg); // thisArg is optional`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

import trackerProfiler from '@openreplay/tracker-profiler/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
@@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';

function AssistNpm(props) {
const { t } = useTranslation();
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerAssist from '@openreplay/tracker-assist';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}',
});
tracker.start()

tracker.use(trackerAssist(options)); // check the list of available options below`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerAssist from '@openreplay/tracker-assist/cjs';
const tracker = new OpenReplay({

tracker.configure({
projectKey: '${props.projectKey}'
});
const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below
@@ -14,19 +14,20 @@ function GraphQLDoc() {
const projectKey = siteId
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerGraphQL from '@openreplay/tracker-graphql';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
//...
export const recordGraphQL = tracker.use(trackerGraphQL());`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
@@ -15,20 +15,21 @@ function MobxDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;

const mobxUsage = `import OpenReplay from '@openreplay/tracker';
const mobxUsage = `import { tracker } from '@openreplay/tracker';
import trackerMobX from '@openreplay/tracker-mobx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below
tracker.start();
`;

const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerMobX from '@openreplay/tracker-mobx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below
@@ -16,10 +16,10 @@ function NgRxDoc() {
: sites[0]?.projectKey;
const usage = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerNgRx from '@openreplay/tracker-ngrx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

@@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
export class AppModule {}`;
const usageCjs = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
@@ -17,10 +17,10 @@ function PiniaDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@@ -16,10 +16,10 @@ function ReduxDoc() {
: sites[0]?.projectKey;

const usage = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerRedux from '@openreplay/tracker-redux';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

@@ -29,10 +29,11 @@ const store = createStore(
applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
);`;
const usageCjs = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerRedux from '@openreplay/tracker-redux/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
@@ -16,10 +16,10 @@ function VueDoc() {
: sites[0]?.projectKey;

const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

@@ -29,10 +29,11 @@ const store = new Vuex.Store({
plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
});`;
const usageCjs = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerVuex from '@openreplay/tracker-vuex/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
@@ -16,11 +16,10 @@ function ZustandDoc(props) {
: sites[0]?.projectKey;

const usage = `import create from "zustand";
import Tracker from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';


const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});

@@ -43,11 +42,12 @@ const useBearStore = create(
)
`;
const usageCjs = `import create from "zustand";
import Tracker from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';


const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});
@@ -3,6 +3,7 @@ import withPageTitle from 'HOCs/withPageTitle';
import { PageTitle } from 'UI';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import LanguageSwitcher from "App/components/LanguageSwitcher";
import Settings from './Settings';
import ChangePassword from './ChangePassword';
import styles from './profileSettings.module.css';

@@ -20,107 +21,90 @@ function ProfileSettings() {
return (
<div className="bg-white rounded-lg border shadow-sm p-5">
<PageTitle title={<div>{t('Account')}</div>} />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Profile')}</h4>
<div className={styles.info}>
{t(
'Your email address is your identity on OpenReplay and is used to login.',
)}
</div>
</div>
<div>
<Settings />
</div>
</div>
<Section
title={t('Profile')}
description={t('Your email address is your identity on OpenReplay and is used to login.')}
children={<Settings />}
/>

<div className="border-b my-10" />

{account.hasPassword && (
<>
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Change Password')}</h4>
<div className={styles.info}>
{t('Updating your password from time to time enhances your account’s security.')}
</div>
</div>
<div>
<ChangePassword />
</div>
</div>
<Section
title={t('Change Password')}
description={t('Updating your password from time to time enhaces your account’s security')}
children={<ChangePassword />}
/>

<div className="border-b my-10" />
</>
)}

<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Organization API Key')}</h4>
<div className={styles.info}>
{t('Your API key gives you access to an extra set of services.')}
</div>
</div>
<div>
<Api />
</div>
</div>
<Section
title={t('Interface Language')}
description={t('Select the language in which OpenReplay will appear.')}
children={<LanguageSwitcher />}
/>

<Section
title={t('Organization API Key')}
description={t('Your API key gives you access to an extra set of services.')}
children={<Api />}
/>

{isEnterprise && (account.admin || account.superAdmin) && (
<>
<div className="border-b my-10" />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Tenant Key')}</h4>
<div className={styles.info}>
{t('For SSO (SAML) authentication.')}
</div>
</div>
<div>
<TenantKey />
</div>
</div>
<Section
title={t('Tenant Key')}
description={t('For SSO (SAML) authentication.')}
children={<TenantKey />}
/>
</>
)}

{!isEnterprise && (
<>
<div className="border-b my-10" />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Data Collection')}</h4>
<div className={styles.info}>
{t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
</div>
</div>
<div>
<OptOut />
</div>
</div>
<Section
title={t('Data Collection')}
description={t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
children={<OptOut />}
/>
</>
)}

{account.license && (
<>
<div className="border-b my-10" />

<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('License')}</h4>
<div className={styles.info}>
{t('License key and expiration date.')}
</div>
</div>
<div>
<Licenses />
</div>
</div>
<Section title={t('License')} description={t('License key and expiration date.')} children={<Licenses />} />
</>
)}
</div>
);
}

function Section({ title, description, children }: {
title: string;
description: string;
children: React.ReactNode;
}) {
return (
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{title}</h4>
<div className={styles.info}>
{description}
</div>
</div>
<div>
{children}
</div>
</div>
)
}

export default withPageTitle('Account - OpenReplay Preferences')(
observer(ProfileSettings),
);
@@ -11,6 +11,7 @@ import { useTranslation } from 'react-i18next';
const initTableProps = [
{
title: <span className="font-medium">Series</span>,
_pureTitle: 'Series',
dataIndex: 'seriesName',
key: 'seriesName',
sorter: (a, b) => a.seriesName.localeCompare(b.seriesName),

@@ -18,6 +19,7 @@ const initTableProps = [
},
{
title: <span className="font-medium">Avg.</span>,
_pureTitle: 'Avg.',
dataIndex: 'average',
key: 'average',
sorter: (a, b) => a.average - b.average,

@@ -94,6 +96,8 @@ function WidgetDatatable(props: Props) {
tableCols.push({
title: <span className="font-medium">{name}</span>,
dataIndex: `${name}_${i}`,
// @ts-ignore
_pureTitle: name,
key: `${name}_${i}`,
sorter: (a, b) => a[`${name}_${i}`] - b[`${name}_${i}`],
});
@@ -1,52 +1,80 @@
import React, { useEffect } from 'react';
import React, { useEffect, useState } from 'react';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import ReCAPTCHA from 'react-google-recaptcha';
import { Form, Input, Loader, Icon, Message } from 'UI';
import { Button } from 'antd';
import { validatePassword } from 'App/validate';
import { PASSWORD_POLICY } from 'App/constants';
import stl from './forgotPassword.module.css';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

const recaptchaRef = React.createRef();
const ERROR_DONT_MATCH = (t) => t("Passwords don't match.");
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;

interface Props {
params: any;
}
function CreatePassword(props: Props) {

function CreatePassword(props: Props & WithCaptchaProps) {
const { t } = useTranslation();
const { params } = props;
const { userStore } = useStore();
const { loading } = userStore;
const { resetPassword } = userStore;
const [error, setError] = React.useState<string | null>(null);
const [validationError, setValidationError] = React.useState<string | null>(
null,
);
const [updated, setUpdated] = React.useState(false);
const [passwordRepeat, setPasswordRepeat] = React.useState('');
const [password, setPassword] = React.useState('');
const [error, setError] = useState<string | null>(null);
const [validationError, setValidationError] = useState<string | null>(null);
const [updated, setUpdated] = useState(false);
const [passwordRepeat, setPasswordRepeat] = useState('');
const [password, setPassword] = useState('');

const pass = params.get('pass');
const invitation = params.get('invitation');

const handleSubmit = () => {
const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;

const handleSubmit = (token?: string) => {
if (!validatePassword(password)) {
return;
}
void resetPassword({ invitation, pass, password });

resetPassword({
invitation,
pass,
password,
'g-recaptcha-response': token
})
.then(() => {
setUpdated(true);
})
.catch((err) => {
setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
});
};

const onSubmit = (e: any) => {
e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
const onSubmit = () => {
// Validate before attempting captcha verification
if (!validatePassword(password) || password !== passwordRepeat) {
setValidationError(
password !== passwordRepeat
? ERROR_DONT_MATCH(t)
: PASSWORD_POLICY(t)
);
return;
}

// Reset any previous errors
setError(null);
setValidationError(null);

submitWithCaptcha({ pass, invitation, password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error) => {
console.error('Captcha verification failed:', error);
// The component will handle showing appropriate messages
});
};

const write = (e: any) => {

@@ -63,7 +91,7 @@ function CreatePassword(props: Props) {
} else {
setValidationError(null);
}
}, [passwordRepeat, password]);
}, [passwordRepeat, password, t]);

return (
<Form

@@ -73,19 +101,8 @@ function CreatePassword(props: Props) {
>
{!error && (
<>
<Loader loading={loading}>
<Loader loading={loading || isVerifyingCaptcha}>
<div data-hidden={updated} className="w-full">
{CAPTCHA_ENABLED && (
<div className={stl.recaptcha}>
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}

<Form.Field>
<label>{t('New password')}</label>
<Input

@@ -132,10 +149,15 @@ function CreatePassword(props: Props) {
<Button
htmlType="submit"
type="primary"
loading={loading}
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha || validationError !== null}
className="w-full mt-4"
>
{t('Create')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Create')}
</Button>
)}
</>

@@ -153,4 +175,4 @@ function CreatePassword(props: Props) {
);
}

export default observer(CreatePassword);
export default withCaptcha(observer(CreatePassword));
@@ -1,24 +1,26 @@
import React from 'react';
import React, { useState } from 'react';
import { Loader, Icon } from 'UI';
import ReCAPTCHA from 'react-google-recaptcha';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import { Form, Input, Button, Typography } from 'antd';
import { SquareArrowOutUpRight } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

function ResetPasswordRequest() {
interface Props {
}

function ResetPasswordRequest(props: Props & WithCaptchaProps) {
const { t } = useTranslation();
const { userStore } = useStore();
const { loading } = userStore;
const { requestResetPassword } = userStore;
const recaptchaRef = React.createRef();
const [requested, setRequested] = React.useState(false);
const [email, setEmail] = React.useState('');
const [error, setError] = React.useState(null);
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;
const [smtpError, setSmtpError] = React.useState<boolean>(false);
const [requested, setRequested] = useState(false);
const [email, setEmail] = useState('');
const [error, setError] = useState(null);
const [smtpError, setSmtpError] = useState<boolean>(false);

const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;

const write = (e: any) => {
const { name, value } = e.target;

@@ -26,22 +28,21 @@ function ResetPasswordRequest() {
};

const onSubmit = () => {
// e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
// Validation check
if (!email || email.trim() === '') {
return;
}

submitWithCaptcha({ email: email.trim() })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha verification failed:', error);
});
};

const handleSubmit = (token?: any) => {
if (
CAPTCHA_ENABLED &&
recaptchaRef.current &&
(token === null || token === undefined)
)
return;

const handleSubmit = (token?: string) => {
setError(null);
requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token })
.catch((err: any) => {

@@ -50,29 +51,21 @@ function ResetPasswordRequest() {
}

setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
})
.finally(() => {
setRequested(true);
});
};

return (
<Form
onFinish={onSubmit}
style={{ minWidth: '50%' }}
className="flex flex-col"
>
<Loader loading={false}>
{CAPTCHA_ENABLED && (
<div className="flex justify-center">
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
data-hidden={requested}
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}
<Loader loading={loading || isVerifyingCaptcha}>
{!requested && (
<>
<Form.Item>

@@ -92,10 +85,14 @@ function ResetPasswordRequest() {
<Button
type="primary"
htmlType="submit"
loading={loading}
disabled={loading}
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
>
{t('Email Password Reset Link')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Email Password Reset Link')}
</Button>
</>
)}

@@ -146,4 +143,4 @@ function ResetPasswordRequest() {
);
}

export default observer(ResetPasswordRequest);
export default withCaptcha(observer(ResetPasswordRequest));
@@ -1,9 +1,7 @@
import { Button, Dropdown, MenuProps, Space, Typography } from 'antd';
import React, { useCallback, useState } from 'react';
import { Button, Dropdown, MenuProps, Typography } from 'antd';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { CaretDownOutlined } from '@ant-design/icons';
import { Languages } from 'lucide-react';
import { Icon } from '../ui';
import { ChevronDown } from 'lucide-react';

const langs = [
{ code: 'en', label: 'English' },

@@ -12,14 +10,25 @@ const langs = [
{ code: 'ru', label: 'Русский' },
{ code: 'zh', label: '中國人' },
];
const langLabels = {
en: 'English',
fr: 'Français',
es: 'Español',
ru: 'Русский',
zh: '中國人',
}

function LanguageSwitcher() {
const { i18n } = useTranslation();
const [selected, setSelected] = React.useState(i18n.language);

const handleChangeLanguage = useCallback((lang: string) => {
i18n.changeLanguage(lang);
localStorage.setItem('i18nextLng', lang);
}, []);
const onChange = (val: string) => {
setSelected(val)
}
const handleChangeLanguage = () => {
void i18n.changeLanguage(selected)
localStorage.setItem('i18nextLng', selected)
}

const menuItems: MenuProps['items'] = langs.map((lang) => ({
key: lang.code,

@@ -31,6 +40,8 @@ function LanguageSwitcher() {
}));

return (
<div className={'flex flex-col gap-2 align-start'}>
<div className={'font-semibold'}>{i18n.t('Language')}</div>
<Dropdown
menu={{
items: menuItems,

@@ -40,12 +51,20 @@ function LanguageSwitcher() {
maxHeight: 500,
overflowY: 'auto',
},
onClick: (e) => handleChangeLanguage(e.key),
onClick: (e) => onChange(e.key),
}}
placement="bottomLeft"
>
<Button icon={<Languages size={12} />} />
<Button>
<div className={'flex justify-between items-center gap-8'}>
<span>{langLabels[selected]}</span>
<ChevronDown size={14} />
</div>
</Button>
</Dropdown>
<Button className={'w-fit'} onClick={handleChangeLanguage}>
{i18n.t('Update')}
</Button>
</div>
);
}
@@ -1,23 +1,18 @@
import withPageTitle from 'HOCs/withPageTitle';
import cn from 'classnames';
import React, { useEffect, useMemo, useRef, useState } from 'react';
// Consider using a different approach for titles in functional components
import ReCAPTCHA from 'react-google-recaptcha';
import React, { useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
import { toast } from 'react-toastify';

import { ENTERPRISE_REQUEIRED } from 'App/constants';
import { forgotPassword, signup } from 'App/routes';
import { Icon, Link, Loader, Tooltip } from 'UI';
import { Icon, Link, Loader } from 'UI';
import { Button, Form, Input } from 'antd';

import Copyright from 'Shared/Copyright';

import stl from './login.module.css';
import { useTranslation } from 'react-i18next';
import { useStore } from 'App/mstore';
import LanguageSwitcher from '../LanguageSwitcher';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
import SSOLogin from './SSOLogin';

const FORGOT_PASSWORD = forgotPassword();
const SIGNUP_ROUTE = signup();

@@ -26,14 +21,15 @@ interface LoginProps {
location: Location;
}

const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';

function Login({ location }: LoginProps) {
function Login({
location,
submitWithCaptcha,
isVerifyingCaptcha,
resetCaptcha,
}: LoginProps & WithCaptchaProps) {
const { t } = useTranslation();
const [email, setEmail] = useState('');
const [password, setPassword] = useState('');
// const CAPTCHA_ENABLED = useMemo(() => window.env.CAPTCHA_ENABLED === 'true', []);
const recaptchaRef = useRef<ReCAPTCHA>(null);
const { loginStore, userStore } = useStore();
const { errors } = userStore.loginRequest;
const { loading } = loginStore;

@@ -49,7 +45,6 @@ function Login({
}, [authDetails]);

useEffect(() => {
// void fetchTenants();
const jwt = params.get('jwt');
const spotJwt = params.get('spotJwt');
if (spotJwt) {

@@ -108,32 +103,36 @@ function Login({
if (resp) {
userStore.syntheticLogin(resp);
setJwt({ jwt: resp.jwt, spotJwt: resp.spotJwt ?? null });
if (resp.spotJwt) {
handleSpotLogin(resp.spotJwt);
}
}
})
.catch((e) => {
userStore.syntheticLoginError(e);
resetCaptcha();
});
};

const onSubmit = () => {
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
if (!email || !password) {
return;
}
};

const ssoLink =
window !== window.top
? `${window.location.origin}/api/sso/saml2?iFrame=true`
: `${window.location.origin}/api/sso/saml2`;
submitWithCaptcha({ email: email.trim(), password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha error:', error);
});
};

return (
<div className="flex items-center justify-center h-screen">
<div className="flex flex-col items-center">
<div className="m-10 ">
<img src="/assets/logo.svg" width={200} />
<img src="/assets/logo.svg" width={200} alt="Company Logo" />
</div>
<div className="border rounded-lg bg-white shadow-sm">
<h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">

@@ -145,15 +144,7 @@ function Login({
className={cn('flex items-center justify-center flex-col')}
style={{ width: '350px' }}
>
<Loader loading={loading}>
{CAPTCHA_ENABLED && (
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={window.env.CAPTCHA_SITE_KEY}
onChange={(token) => handleSubmit(token)}
/>
)}
<Loader loading={loading || isVerifyingCaptcha}>
<div style={{ width: '350px' }} className="px-8">
<Form.Item>
<label>{t('Email Address')}</label>

@@ -186,8 +177,8 @@ function Login({
</Loader>
{errors && errors.length ? (
<div className="px-8 my-2 w-full">
{errors.map((error) => (
<div className="flex items-center bg-red-lightest rounded p-3">
{errors.map((error, index) => (
<div key={index} className="flex items-center bg-red-lightest rounded p-3">
<Icon name="info" color="red" size="20" />
<span className="color-red ml-2">
{error}

@@ -204,8 +195,14 @@ function Login({
className="mt-2 w-full text-center rounded-lg"
type="primary"
htmlType="submit"
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
>
{t('Login')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Logging in...')
: t('Login')}
</Button>

<div className="my-8 flex justify-center items-center flex-wrap">

@@ -219,64 +216,13 @@ function Login({
</div>
</Form>

<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
{authDetails.sso ? (
<a href={ssoLink} rel="noopener noreferrer">
<Button type="text" htmlType="submit">
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</a>
) : (
<Tooltip
delay={0}
title={
<div className="text-center">
{authDetails.edition === 'ee' ? (
<span>
{t('SSO has not been configured.')}
<br />
{t('Please reach out to your admin.')}
</span>
) : (
ENTERPRISE_REQUEIRED(t)
<SSOLogin authDetails={authDetails} />
</div>

{authDetails?.enforceSSO && (
<SSOLogin authDetails={authDetails} enforceSSO={true} />
)}
</div>
}
placement="top"
>
<Button
type="text"
htmlType="submit"
className="pointer-events-none opacity-30"
>
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</Tooltip>
)}
</div>
</div>
<div
className={cn('flex items-center w-96 justify-center my-8', {
'!hidden': !authDetails?.enforceSSO,
|
||||
})}
|
||||
>
|
||||
<a href={ssoLink} rel="noopener noreferrer">
|
||||
<Button type="primary">
|
||||
{`${t('Login with SSO')} ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}
|
||||
</Button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Copyright />
|
||||
|
|
@ -287,4 +233,6 @@ function Login({ location }: LoginProps) {
|
|||
);
|
||||
}
|
||||
|
||||
export default withPageTitle('Login - OpenReplay')(observer(Login));
|
||||
export default withPageTitle('Login - OpenReplay')(
|
||||
withCaptcha(observer(Login))
|
||||
);
|
||||
|
|
|
|||
frontend/app/components/Login/SSOLogin.tsx (new file, 78 lines)

@@ -0,0 +1,78 @@
import React from 'react';
import cn from 'classnames';
import { Button, Tooltip } from 'antd';
import { useTranslation } from 'react-i18next';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import stl from './login.module.css';
import { useStore } from 'App/mstore';

interface SSOLoginProps {
  authDetails: any;
  enforceSSO?: boolean;
}

const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
  const { userStore } = useStore();
  const { t } = useTranslation();
  const { isEnterprise } = userStore;

  const getSSOLink = () =>
    window !== window.top
      ? `${window.location.origin}/api/sso/saml2?iFrame=true`
      : `${window.location.origin}/api/sso/saml2`;

  const ssoLink = getSSOLink();
  const ssoButtonText = `${t('Login with SSO')} ${
    authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
  }`;

  if (enforceSSO) {
    return (
      <div className={cn('flex items-center w-96 justify-center my-8')}>
        <a href={ssoLink} rel="noopener noreferrer">
          <Button type="primary">{ssoButtonText}</Button>
        </a>
      </div>
    );
  }

  return (
    <div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
      {authDetails.sso ? (
        <a href={ssoLink} rel="noopener noreferrer">
          <Button type="text" htmlType="submit">
            {ssoButtonText}
          </Button>
        </a>
      ) : (
        <Tooltip
          title={
            <div className="text-center">
              {isEnterprise ? (
                <span>
                  {t('SSO has not been configured.')}
                  <br />
                  {t('Please reach out to your admin.')}
                </span>
              ) : (
                ENTERPRISE_REQUEIRED(t)
              )}
            </div>
          }
          placement="top"
        >
          <span className="cursor-not-allowed">
            <Button
              type="text"
              htmlType="submit"
              disabled={true}
            >
              {ssoButtonText}
            </Button>
          </span>
        </Tooltip>
      )}
    </div>
  );
};

export default SSOLogin;
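For orientation, this is roughly how the new component is consumed by the login screen, matching the props visible in the Login.tsx hunks above (authDetails comes from the auth store, enforceSSO switches to the standalone primary-button variant). The wrapper name LoginFooter below is invented for the sketch and is not part of the diff:

// Illustrative sketch only; mirrors the <SSOLogin /> usages in the Login.tsx hunks above.
import React from 'react';
import SSOLogin from './SSOLogin';

function LoginFooter({ authDetails }: { authDetails: any }) {
  return (
    <>
      {/* Standard SSO entry rendered under the login form */}
      <SSOLogin authDetails={authDetails} />
      {/* Standalone primary button when SSO is enforced for the tenant */}
      {authDetails?.enforceSSO && <SSOLogin authDetails={authDetails} enforceSSO={true} />}
    </>
  );
}

export default LoginFooter;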
@ -1,16 +1,14 @@
|
|||
import React from 'react';
|
||||
import { Redirect, Route, RouteComponentProps, Switch } from 'react-router';
|
||||
import { withRouter } from 'react-router-dom';
|
||||
|
||||
import { OB_TABS, onboarding as onboardingRoute, withSiteId } from 'App/routes';
|
||||
import { Icon } from 'UI';
|
||||
|
||||
import IdentifyUsersTab from './components/IdentifyUsersTab';
|
||||
import InstallOpenReplayTab from './components/InstallOpenReplayTab';
|
||||
import IntegrationsTab from './components/IntegrationsTab';
|
||||
import ManageUsersTab from './components/ManageUsersTab';
|
||||
import SideMenu from './components/SideMenu';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Smartphone, AppWindow } from 'lucide-react';
|
||||
|
||||
interface Props {
|
||||
match: {
|
||||
|
|
@ -33,7 +31,7 @@ function Onboarding(props: Props) {
|
|||
{
|
||||
label: (
|
||||
<div className="font-semibold flex gap-2 items-center">
|
||||
<Icon name="browser/browser" size={16} />
|
||||
<AppWindow size={16} />
|
||||
{t('Web')}
|
||||
</div>
|
||||
),
|
||||
|
|
@ -42,7 +40,7 @@ function Onboarding(props: Props) {
|
|||
{
|
||||
label: (
|
||||
<div className="font-semibold flex gap-2 items-center">
|
||||
<Icon name="mobile" size={16} />
|
||||
<Smartphone size={16} />
|
||||
{t('Mobile')}
|
||||
</div>
|
||||
),
|
||||
|
|
|
|||
|
|
@ -130,18 +130,20 @@ function IdentifyUsersTab(props: Props) {
|
|||
'To identify users through metadata, you will have to explicitly specify your user metadata so it can be injected during sessions. Follow the below steps',
|
||||
)}
|
||||
</p>
|
||||
<div className="flex items-start">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<CircleNumber text="1" />
|
||||
<MetadataList />
|
||||
</div>
|
||||
|
||||
<div className="my-6" />
|
||||
<div className="flex items-start">
|
||||
<div>
|
||||
<CircleNumber text="2" />
|
||||
<div className="pt-1 w-full">
|
||||
<span className="font-bold">
|
||||
{t('Inject metadata when recording sessions')}
|
||||
</span>
|
||||
</div>
|
||||
<div className="pt-1 w-full">
|
||||
<div className="my-2">
|
||||
{t('Use the')}
|
||||
<span className="highlight-blue">setMetadata</span>{' '}
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import MobileOnboardingTabs from '../OnboardingTabs/OnboardingMobileTabs';
|
|||
import ProjectFormButton from '../ProjectFormButton';
|
||||
import withOnboarding, { WithOnboardingProps } from '../withOnboarding';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { CircleHelp } from 'lucide-react'
|
||||
|
||||
interface Props extends WithOnboardingProps {
|
||||
platforms: Array<{
|
||||
|
|
@ -45,8 +46,8 @@ function InstallOpenReplayTab(props: Props) {
|
|||
</div>
|
||||
<a href={"https://docs.openreplay.com/en/sdk/using-or/"} target="_blank">
|
||||
<Button size={"small"} type={"text"} className="ml-2 flex items-center gap-2">
|
||||
<Icon name={"question-circle"} />
|
||||
<div className={"text-main"}>{t('See Documentation')}</div>
|
||||
<CircleHelp size={14} />
|
||||
<div>{t('See Documentation')}</div>
|
||||
</Button>
|
||||
</a>
|
||||
</h1>
|
||||
|
|
|
|||
|
|
@ -55,7 +55,6 @@ function MetadataList() {
|
|||
<Button type="default" onClick={() => openModal()}>
|
||||
{t('Add Metadata')}
|
||||
</Button>
|
||||
<div className="flex ml-2">
|
||||
{fields.map((f, index) => (
|
||||
<TagBadge
|
||||
key={index}
|
||||
|
|
@ -65,7 +64,6 @@ function MetadataList() {
|
|||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,32 @@
import React from 'react'
import DocCard from "App/components/shared/DocCard";
import { useTranslation } from 'react-i18next';
import { Mail } from 'lucide-react'
import { CopyButton } from "UI";

export function CollabCard({ showUserModal }: { showUserModal: () => void }) {
  const { t } = useTranslation();

  return (
    <DocCard title={t('Need help from team member?')}>
      <div className={'text-main cursor-pointer flex items-center gap-2'} onClick={showUserModal}>
        <Mail size={14} />
        <span>
          {t('Invite and Collaborate')}
        </span>
      </div>
    </DocCard>
  )
}

export function ProjectKeyCard({ projectKey }: { projectKey: string }) {
  const { t } = useTranslation();
  return (
    <DocCard title={t('Project Key')}>
      <div className="p-2 rounded bg-white flex justify-between items-center">
        <div className={'font-mono'}>{projectKey}</div>
        <CopyButton content={projectKey} className={'capitalize font-medium text-neutral-400'} />
      </div>
    </DocCard>
  )
}
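The two cards above replace the inline DocCard markup that the tracking-code modal hunks further down used to repeat. A minimal consumer, using the same props that appear in those hunks (site.projectKey and the showUserModal callback); the Sidebar name is invented for the sketch:

// Illustrative sketch; prop shapes follow the TrackingCodeModal hunks below.
import React from 'react';
import { CollabCard, ProjectKeyCard } from './Callouts';

function Sidebar({ site, showUserModal }: { site: { projectKey: string }; showUserModal: () => void }) {
  return (
    <div className="col-span-2">
      <CollabCard showUserModal={showUserModal} />
      <ProjectKeyCard projectKey={site.projectKey} />
    </div>
  );
}

export default Sidebar;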
@@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';

const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';

const tracker = new Tracker({
tracker.configure({
  projectKey: "PROJECT_KEY",
  ingestPoint: "https://${window.location.hostname}/ingest",
});
tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

const tracker = new Tracker({
tracker.configure({
  projectKey: "PROJECT_KEY",
  ingestPoint: "https://${window.location.hostname}/ingest",
});
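The snippet strings above move the onboarding docs from the new Tracker({...}) constructor to the shared tracker singleton, configured via tracker.configure() and started with tracker.start(). A minimal sketch of that style in an application entry point, assuming the placeholder PROJECT_KEY and the self-hosted /ingest endpoint shown in the snippet; the AppRoot component is invented for the example:

// Sketch based on the updated snippet above; PROJECT_KEY is a placeholder.
import React, { useEffect } from 'react';
import { tracker } from '@openreplay/tracker';

tracker.configure({
  projectKey: 'PROJECT_KEY',
  ingestPoint: `https://${window.location.hostname}/ingest`,
});

export function AppRoot({ children }: { children: React.ReactNode }) {
  useEffect(() => {
    // configure() runs before start(); the return value of start() is ignored in this sketch.
    void tracker.start();
  }, []);
  return <>{children}</>;
}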
@ -4,6 +4,7 @@ import DocCard from 'Shared/DocCard/DocCard';
|
|||
import { useModal } from 'App/components/Modal';
|
||||
import UserForm from 'App/components/Client/Users/components/UserForm/UserForm';
|
||||
import AndroidInstallDocs from 'Components/Onboarding/components/OnboardingTabs/InstallDocs/AndroidInstallDocs';
|
||||
import { CollabCard, ProjectKeyCard } from "./Callouts";
|
||||
import MobileInstallDocs from './InstallDocs/MobileInstallDocs';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
|
|
@ -39,18 +40,9 @@ function MobileTrackingCodeModal(props: Props) {
|
|||
</div>
|
||||
|
||||
<div className="col-span-2">
|
||||
<DocCard title={t('Need help from team member?')}>
|
||||
<a className="link" onClick={showUserModal}>
|
||||
{t('Invite and Collaborate')}
|
||||
</a>
|
||||
</DocCard>
|
||||
<CollabCard showUserModal={showUserModal} />
|
||||
|
||||
<DocCard title={t('Project Key')}>
|
||||
<div className="p-2 rounded bg-white flex justify-between items-center">
|
||||
{site.projectKey}
|
||||
<CopyButton content={site.projectKey} />
|
||||
</div>
|
||||
</DocCard>
|
||||
<ProjectKeyCard projectKey={site.projectKey} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
|
@ -62,18 +54,9 @@ function MobileTrackingCodeModal(props: Props) {
|
|||
</div>
|
||||
|
||||
<div className="col-span-2">
|
||||
<DocCard title={t('Need help from team member?')}>
|
||||
<a className="link" onClick={showUserModal}>
|
||||
{t('Invite and Collaborate')}
|
||||
</a>
|
||||
</DocCard>
|
||||
<CollabCard showUserModal={showUserModal} />
|
||||
|
||||
<DocCard title={t('Project Key')}>
|
||||
<div className="p-2 rounded bg-white flex justify-between items-center">
|
||||
{site.projectKey}
|
||||
<CopyButton content={site.projectKey} />
|
||||
</div>
|
||||
</DocCard>
|
||||
<ProjectKeyCard projectKey={site.projectKey} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import { Tabs, Icon, CopyButton } from 'UI';
|
|||
import DocCard from 'Shared/DocCard/DocCard';
|
||||
import { useModal } from 'App/components/Modal';
|
||||
import UserForm from 'App/components/Client/Users/components/UserForm/UserForm';
|
||||
import { CollabCard, ProjectKeyCard } from "./Callouts";
|
||||
import InstallDocs from './InstallDocs';
|
||||
import ProjectCodeSnippet from './ProjectCodeSnippet';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
|
@ -37,20 +38,9 @@ function TrackingCodeModal(props: Props) {
|
|||
</div>
|
||||
|
||||
<div className="col-span-2">
|
||||
<DocCard title="Need help from team member?">
|
||||
<a className="link" onClick={showUserModal}>
|
||||
{t('Invite and Collaborate')}
|
||||
</a>
|
||||
</DocCard>
|
||||
<DocCard title="Project Key">
|
||||
<div className="rounded bg-white px-2 py-1 flex items-center justify-between">
|
||||
<span>{site.projectKey}</span>
|
||||
<CopyButton
|
||||
content={site.projectKey}
|
||||
className="capitalize"
|
||||
/>
|
||||
</div>
|
||||
</DocCard>
|
||||
<CollabCard showUserModal={showUserModal} />
|
||||
|
||||
<ProjectKeyCard projectKey={site.projectKey} />
|
||||
<DocCard title="Other ways to install">
|
||||
<a
|
||||
className="link flex items-center"
|
||||
|
|
@ -77,18 +67,9 @@ function TrackingCodeModal(props: Props) {
|
|||
</div>
|
||||
|
||||
<div className="col-span-2">
|
||||
<DocCard title="Need help from team member?">
|
||||
<a className="link" onClick={showUserModal}>
|
||||
{t('Invite and Collaborate')}
|
||||
</a>
|
||||
</DocCard>
|
||||
<CollabCard showUserModal={showUserModal} />
|
||||
|
||||
<DocCard title="Project Key">
|
||||
<div className="p-2 rounded bg-white flex justify-between items-center">
|
||||
{site.projectKey}
|
||||
<CopyButton content={site.projectKey} />
|
||||
</div>
|
||||
</DocCard>
|
||||
<ProjectKeyCard projectKey={site.projectKey} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
|
|
|||
|
|
@@ -41,7 +41,7 @@ function SideMenu(props: Props) {
      <Menu
        mode="inline"
        onClick={handleClick}
        style={{ marginTop: '8px', border: 'none' }}
        style={{ border: 'none' }}
        selectedKeys={activeTab ? [activeTab] : []}
      >
        <Menu.Item
|
|
@@ -65,7 +65,6 @@ function GraphQL({ panelHeight }: { panelHeight: number }) {

  const filterList = (list: any, value: string) => {
    const filterRE = getRE(value, 'i');
    const { t } = useTranslation();

    return value
      ? list.filter(
|
|
@ -1,9 +1,17 @@
|
|||
/* eslint-disable i18next/no-literal-string */
|
||||
import { ResourceType, Timed } from 'Player';
|
||||
import { WsChannel } from 'Player/web/messages';
|
||||
import MobilePlayer from 'Player/mobile/IOSPlayer';
|
||||
import WebPlayer from 'Player/web/WebPlayer';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import React, {
|
||||
useMemo,
|
||||
useState,
|
||||
useEffect,
|
||||
useCallback,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import i18n from 'App/i18n'
|
||||
|
||||
import { useModal } from 'App/components/Modal';
|
||||
import {
|
||||
|
|
@ -12,25 +20,27 @@ import {
|
|||
} from 'App/components/Session/playerContext';
|
||||
import { formatMs } from 'App/date';
|
||||
import { useStore } from 'App/mstore';
|
||||
import { formatBytes } from 'App/utils';
|
||||
import { formatBytes, debounceCall } from 'App/utils';
|
||||
import { Icon, NoContent, Tabs } from 'UI';
|
||||
import { Tooltip, Input, Switch, Form } from 'antd';
|
||||
import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons';
|
||||
import {
|
||||
SearchOutlined,
|
||||
InfoCircleOutlined,
|
||||
} from '@ant-design/icons';
|
||||
|
||||
import FetchDetailsModal from 'Shared/FetchDetailsModal';
|
||||
import { WsChannel } from 'App/player/web/messages';
|
||||
|
||||
import BottomBlock from '../BottomBlock';
|
||||
import InfoLine from '../BottomBlock/InfoLine';
|
||||
import TabSelector from '../TabSelector';
|
||||
import TimeTable from '../TimeTable';
|
||||
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
|
||||
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
|
||||
import WSPanel from './WSPanel';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { mergeListsWithZoom, processInChunks } from './utils'
|
||||
|
||||
// Constants remain the same
|
||||
const INDEX_KEY = 'network';
|
||||
|
||||
const ALL = 'ALL';
|
||||
const XHR = 'xhr';
|
||||
const JS = 'js';
|
||||
|
|
@ -62,6 +72,9 @@ export const NETWORK_TABS = TAP_KEYS.map((tab) => ({
|
|||
const DOM_LOADED_TIME_COLOR = 'teal';
|
||||
const LOAD_TIME_COLOR = 'red';
|
||||
|
||||
const BATCH_SIZE = 2500;
|
||||
const INITIAL_LOAD_SIZE = 5000;
|
||||
|
||||
export function renderType(r: any) {
|
||||
return (
|
||||
<Tooltip style={{ width: '100%' }} title={<div>{r.type}</div>}>
|
||||
|
|
@ -79,13 +92,17 @@ export function renderName(r: any) {
|
|||
}
|
||||
|
||||
function renderSize(r: any) {
|
||||
const { t } = useTranslation();
|
||||
if (r.responseBodySize) return formatBytes(r.responseBodySize);
|
||||
const t = i18n.t;
|
||||
const notCaptured = t('Not captured');
|
||||
const resSizeStr = t('Resource size')
|
||||
let triggerText;
|
||||
let content;
|
||||
if (r.decodedBodySize == null || r.decodedBodySize === 0) {
|
||||
if (r.responseBodySize) {
|
||||
triggerText = formatBytes(r.responseBodySize);
|
||||
content = undefined;
|
||||
} else if (r.decodedBodySize == null || r.decodedBodySize === 0) {
|
||||
triggerText = 'x';
|
||||
content = t('Not captured');
|
||||
content = notCaptured;
|
||||
} else {
|
||||
const headerSize = r.headerSize || 0;
|
||||
const showTransferred = r.headerSize != null;
|
||||
|
|
@ -100,7 +117,7 @@ function renderSize(r: any) {
|
|||
)} transferred over network`}
|
||||
</li>
|
||||
)}
|
||||
<li>{`${t('Resource size')}: ${formatBytes(r.decodedBodySize)} `}</li>
|
||||
<li>{`${resSizeStr}: ${formatBytes(r.decodedBodySize)} `}</li>
|
||||
</ul>
|
||||
);
|
||||
}
|
||||
|
|
@ -168,6 +185,8 @@ function renderStatus({
|
|||
);
|
||||
}
|
||||
|
||||
|
||||
// Main component for Network Panel
|
||||
function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
||||
const { player, store } = React.useContext(PlayerContext);
|
||||
const { sessionStore, uiPlayerStore } = useStore();
|
||||
|
|
@ -216,6 +235,7 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
|
||||
const getTabNum = (tab: string) => tabsArr.findIndex((t) => t === tab) + 1;
|
||||
const getTabName = (tabId: string) => tabNames[tabId];
|
||||
|
||||
return (
|
||||
<NetworkPanelComp
|
||||
loadTime={loadTime}
|
||||
|
|
@ -228,8 +248,8 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
resourceListNow={resourceListNow}
|
||||
player={player}
|
||||
startedAt={startedAt}
|
||||
websocketList={websocketList as WSMessage[]}
|
||||
websocketListNow={websocketListNow as WSMessage[]}
|
||||
websocketList={websocketList}
|
||||
websocketListNow={websocketListNow}
|
||||
getTabNum={getTabNum}
|
||||
getTabName={getTabName}
|
||||
showSingleTab={showSingleTab}
|
||||
|
|
@ -269,9 +289,7 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
resourceListNow={resourceListNow}
|
||||
player={player}
|
||||
startedAt={startedAt}
|
||||
// @ts-ignore
|
||||
websocketList={websocketList}
|
||||
// @ts-ignore
|
||||
websocketListNow={websocketListNow}
|
||||
zoomEnabled={zoomEnabled}
|
||||
zoomStartTs={zoomStartTs}
|
||||
|
|
@ -280,12 +298,35 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
);
|
||||
}
|
||||
|
||||
type WSMessage = Timed & {
  channelName: string;
  data: string;
  timestamp: number;
  dir: 'up' | 'down';
  messageType: string;
const useInfiniteScroll = (loadMoreCallback: () => void, hasMore: boolean) => {
  const observerRef = useRef<IntersectionObserver>(null);
  const loadingRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    const observer = new IntersectionObserver(
      (entries) => {
        if (entries[0]?.isIntersecting && hasMore) {
          loadMoreCallback();
        }
      },
      { threshold: 0.1 },
    );

    if (loadingRef.current) {
      observer.observe(loadingRef.current);
    }

    // @ts-ignore
    observerRef.current = observer;

    return () => {
      if (observerRef.current) {
        observerRef.current.disconnect();
      }
    };
  }, [loadMoreCallback, hasMore, loadingRef]);

  return loadingRef;
};
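A self-contained sketch of how the hook above is typically attached to a sentinel element: the IntersectionObserver inside calls the callback whenever the sentinel scrolls into view and more rows remain. InfiniteList, items and pageSize are invented for the example and rely on the React hooks already imported in this file; the real panel feeds displayedItems/totalItems from the processed network list in the hunks that follow.

// Illustrative only; not part of the diff.
function InfiniteList({ items, pageSize = 2500 }: { items: string[]; pageSize?: number }) {
  const [visibleCount, setVisibleCount] = useState(pageSize);
  const loadMore = useCallback(
    () => setVisibleCount((count) => Math.min(count + pageSize, items.length)),
    [items.length, pageSize],
  );
  const hasMore = visibleCount < items.length;
  // The returned ref goes on a sentinel node at the bottom of the list.
  const loadingRef = useInfiniteScroll(loadMore, hasMore);

  return (
    <div>
      {items.slice(0, visibleCount).map((it) => (
        <div key={it}>{it}</div>
      ))}
      {hasMore && <div ref={loadingRef}>Loading more...</div>}
    </div>
  );
}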
|
||||
interface Props {
|
||||
|
|
@ -302,8 +343,8 @@ interface Props {
|
|||
resourceList: Timed[];
|
||||
fetchListNow: Timed[];
|
||||
resourceListNow: Timed[];
|
||||
websocketList: Array<WSMessage>;
|
||||
websocketListNow: Array<WSMessage>;
|
||||
websocketList: Array<WsChannel>;
|
||||
websocketListNow: Array<WsChannel>;
|
||||
player: WebPlayer | MobilePlayer;
|
||||
startedAt: number;
|
||||
isMobile?: boolean;
|
||||
|
|
@ -349,58 +390,66 @@ export const NetworkPanelComp = observer(
|
|||
>(null);
|
||||
const { showModal } = useModal();
|
||||
const [showOnlyErrors, setShowOnlyErrors] = useState(false);
|
||||
|
||||
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isProcessing, setIsProcessing] = useState(false);
|
||||
const [displayedItems, setDisplayedItems] = useState([]);
|
||||
const [totalItems, setTotalItems] = useState(0);
|
||||
const [summaryStats, setSummaryStats] = useState({
|
||||
resourcesSize: 0,
|
||||
transferredSize: 0,
|
||||
});
|
||||
|
||||
const originalListRef = useRef([]);
|
||||
const socketListRef = useRef([]);
|
||||
|
||||
const {
|
||||
sessionStore: { devTools },
|
||||
} = useStore();
|
||||
const { filter } = devTools[INDEX_KEY];
|
||||
const { activeTab } = devTools[INDEX_KEY];
|
||||
const activeIndex = activeOutsideIndex ?? devTools[INDEX_KEY].index;
|
||||
const [inputFilterValue, setInputFilterValue] = useState(filter);
|
||||
|
||||
const socketList = useMemo(
|
||||
() =>
|
||||
websocketList.filter(
|
||||
const debouncedFilter = useCallback(
|
||||
debounceCall((filterValue) => {
|
||||
devTools.update(INDEX_KEY, { filter: filterValue });
|
||||
}, 300),
|
||||
[],
|
||||
);
|
||||
|
||||
// Process socket lists once
|
||||
useEffect(() => {
|
||||
const uniqueSocketList = websocketList.filter(
|
||||
(ws, i, arr) =>
|
||||
arr.findIndex((it) => it.channelName === ws.channelName) === i,
|
||||
),
|
||||
[websocketList],
|
||||
);
|
||||
socketListRef.current = uniqueSocketList;
|
||||
}, [websocketList.length]);
|
||||
|
||||
const list = useMemo(
|
||||
() =>
|
||||
// TODO: better merge (with body size info) - do it in player
|
||||
resourceList
|
||||
.filter(
|
||||
(res) =>
|
||||
!fetchList.some((ft) => {
|
||||
// res.url !== ft.url doesn't work on relative URLs appearing within fetchList (to-fix in player)
|
||||
if (res.name === ft.name) {
|
||||
if (res.time === ft.time) return true;
|
||||
if (res.url.includes(ft.url)) {
|
||||
return (
|
||||
Math.abs(res.time - ft.time) < 350 ||
|
||||
Math.abs(res.timestamp - ft.timestamp) < 350
|
||||
);
|
||||
}
|
||||
}
|
||||
// Initial data processing - do this only once when data changes
|
||||
useEffect(() => {
|
||||
setIsLoading(true);
|
||||
|
||||
if (res.name !== ft.name) {
|
||||
return false;
|
||||
}
|
||||
if (Math.abs(res.time - ft.time) > 250) {
|
||||
return false;
|
||||
} // TODO: find good epsilons
|
||||
if (Math.abs(res.duration - ft.duration) > 200) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
// Heaviest operation here, will create a final merged network list
|
||||
const processData = async () => {
|
||||
const fetchUrls = new Set(
|
||||
fetchList.map((ft) => {
|
||||
return `${ft.name}-${Math.floor(ft.time / 100)}-${Math.floor(ft.duration / 100)}`;
|
||||
}),
|
||||
)
|
||||
.concat(fetchList)
|
||||
.concat(
|
||||
socketList.map((ws) => ({
|
||||
);
|
||||
|
||||
// We want to get resources that aren't in fetch list
|
||||
const filteredResources = await processInChunks(resourceList, (chunk) =>
|
||||
chunk.filter((res: any) => {
|
||||
const key = `${res.name}-${Math.floor(res.time / 100)}-${Math.floor(res.duration / 100)}`;
|
||||
return !fetchUrls.has(key);
|
||||
}),
|
||||
BATCH_SIZE,
|
||||
25,
|
||||
);
|
||||
|
||||
const processedSockets = socketListRef.current.map((ws: any) => ({
|
||||
...ws,
|
||||
type: 'websocket',
|
||||
method: 'ws',
|
||||
|
|
@ -409,47 +458,121 @@ export const NetworkPanelComp = observer(
|
|||
status: '101',
|
||||
duration: 0,
|
||||
transferredBodySize: 0,
|
||||
})),
|
||||
)
|
||||
.filter((req) =>
|
||||
zoomEnabled
|
||||
? req.time >= zoomStartTs! && req.time <= zoomEndTs!
|
||||
: true,
|
||||
)
|
||||
.sort((a, b) => a.time - b.time),
|
||||
[resourceList.length, fetchList.length, socketList.length],
|
||||
);
|
||||
}));
|
||||
|
||||
let filteredList = useMemo(() => {
|
||||
if (!showOnlyErrors) {
|
||||
return list;
|
||||
const mergedList: Timed[] = mergeListsWithZoom(
|
||||
filteredResources as Timed[],
|
||||
fetchList,
|
||||
processedSockets as Timed[],
|
||||
{ enabled: Boolean(zoomEnabled), start: zoomStartTs ?? 0, end: zoomEndTs ?? 0 }
|
||||
)
|
||||
|
||||
originalListRef.current = mergedList;
|
||||
setTotalItems(mergedList.length);
|
||||
|
||||
calculateResourceStats(resourceList);
|
||||
|
||||
// Only display initial chunk
|
||||
setDisplayedItems(mergedList.slice(0, INITIAL_LOAD_SIZE));
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
void processData();
|
||||
}, [
|
||||
resourceList.length,
|
||||
fetchList.length,
|
||||
socketListRef.current.length,
|
||||
zoomEnabled,
|
||||
zoomStartTs,
|
||||
zoomEndTs,
|
||||
]);
|
||||
|
||||
const calculateResourceStats = (resourceList: Record<string, any>) => {
|
||||
setTimeout(() => {
|
||||
let resourcesSize = 0
|
||||
let transferredSize = 0
|
||||
resourceList.forEach(({ decodedBodySize, headerSize, encodedBodySize }: any) => {
|
||||
resourcesSize += decodedBodySize || 0
|
||||
transferredSize += (headerSize || 0) + (encodedBodySize || 0)
|
||||
})
|
||||
|
||||
setSummaryStats({
|
||||
resourcesSize,
|
||||
transferredSize,
|
||||
});
|
||||
}, 0);
|
||||
}
|
||||
return list.filter(
|
||||
(it) => parseInt(it.status) >= 400 || !it.success || it.error,
|
||||
);
|
||||
}, [showOnlyErrors, list]);
|
||||
filteredList = useRegExListFilterMemo(
|
||||
filteredList,
|
||||
(it) => [it.status, it.name, it.type, it.method],
|
||||
filter,
|
||||
);
|
||||
filteredList = useTabListFilterMemo(
|
||||
filteredList,
|
||||
(it) => TYPE_TO_TAB[it.type],
|
||||
ALL,
|
||||
activeTab,
|
||||
|
||||
useEffect(() => {
|
||||
if (originalListRef.current.length === 0) return;
|
||||
setIsProcessing(true);
|
||||
const applyFilters = async () => {
|
||||
let filteredItems: any[] = originalListRef.current;
|
||||
|
||||
filteredItems = await processInChunks(filteredItems, (chunk) =>
|
||||
chunk.filter(
|
||||
(it) => {
|
||||
let valid = true;
|
||||
if (showOnlyErrors) {
|
||||
valid = parseInt(it.status) >= 400 || !it.success || it.error
|
||||
}
|
||||
if (filter) {
|
||||
try {
|
||||
const regex = new RegExp(filter, 'i');
|
||||
valid = valid && regex.test(it.status) || regex.test(it.name) || regex.test(it.type) || regex.test(it.method);
|
||||
} catch (e) {
|
||||
valid = valid && String(it.status).includes(filter) || it.name.includes(filter) || it.type.includes(filter) || (it.method && it.method.includes(filter));
|
||||
}
|
||||
}
|
||||
if (activeTab !== ALL) {
|
||||
valid = valid && TYPE_TO_TAB[it.type] === activeTab;
|
||||
}
|
||||
|
||||
return valid;
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
const onTabClick = (activeTab: (typeof TAP_KEYS)[number]) =>
|
||||
// Update displayed items
|
||||
setDisplayedItems(filteredItems.slice(0, INITIAL_LOAD_SIZE));
|
||||
setTotalItems(filteredItems.length);
|
||||
setIsProcessing(false);
|
||||
};
|
||||
|
||||
void applyFilters();
|
||||
}, [filter, activeTab, showOnlyErrors]);
|
||||
|
||||
const loadMoreItems = useCallback(() => {
|
||||
if (isProcessing) return;
|
||||
|
||||
setIsProcessing(true);
|
||||
setTimeout(() => {
|
||||
setDisplayedItems((prevItems) => {
|
||||
const currentLength = prevItems.length;
|
||||
const newItems = originalListRef.current.slice(
|
||||
currentLength,
|
||||
currentLength + BATCH_SIZE,
|
||||
);
|
||||
return [...prevItems, ...newItems];
|
||||
});
|
||||
setIsProcessing(false);
|
||||
}, 10);
|
||||
}, [isProcessing]);
|
||||
|
||||
const hasMoreItems = displayedItems.length < totalItems;
|
||||
const loadingRef = useInfiniteScroll(loadMoreItems, hasMoreItems);
|
||||
|
||||
const onTabClick = (activeTab) => {
|
||||
devTools.update(INDEX_KEY, { activeTab });
|
||||
const onFilterChange = ({
|
||||
target: { value },
|
||||
}: React.ChangeEvent<HTMLInputElement>) =>
|
||||
devTools.update(INDEX_KEY, { filter: value });
|
||||
};
|
||||
|
||||
const onFilterChange = ({ target: { value } }) => {
|
||||
setInputFilterValue(value)
|
||||
debouncedFilter(value);
|
||||
};
|
||||
|
||||
// AutoScroll
|
||||
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
|
||||
filteredList,
|
||||
displayedItems,
|
||||
getLastItemTime(fetchListNow, resourceListNow),
|
||||
activeIndex,
|
||||
(index) => devTools.update(INDEX_KEY, { index }),
|
||||
|
|
@ -462,24 +585,6 @@ export const NetworkPanelComp = observer(
|
|||
timeoutStartAutoscroll();
|
||||
};
|
||||
|
||||
const resourcesSize = useMemo(
|
||||
() =>
|
||||
resourceList.reduce(
|
||||
(sum, { decodedBodySize }) => sum + (decodedBodySize || 0),
|
||||
0,
|
||||
),
|
||||
[resourceList.length],
|
||||
);
|
||||
const transferredSize = useMemo(
|
||||
() =>
|
||||
resourceList.reduce(
|
||||
(sum, { headerSize, encodedBodySize }) =>
|
||||
sum + (headerSize || 0) + (encodedBodySize || 0),
|
||||
0,
|
||||
),
|
||||
[resourceList.length],
|
||||
);
|
||||
|
||||
const referenceLines = useMemo(() => {
|
||||
const arr = [];
|
||||
|
||||
|
|
@ -513,7 +618,7 @@ export const NetworkPanelComp = observer(
|
|||
isSpot={isSpot}
|
||||
time={item.time + startedAt}
|
||||
resource={item}
|
||||
rows={filteredList}
|
||||
rows={displayedItems}
|
||||
fetchPresented={fetchList.length > 0}
|
||||
/>,
|
||||
{
|
||||
|
|
@ -525,12 +630,12 @@ export const NetworkPanelComp = observer(
|
|||
},
|
||||
},
|
||||
);
|
||||
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
|
||||
devTools.update(INDEX_KEY, { index: displayedItems.indexOf(item) });
|
||||
stopAutoscroll();
|
||||
};
|
||||
|
||||
const tableCols = React.useMemo(() => {
|
||||
const cols: any[] = [
|
||||
const tableCols = useMemo(() => {
|
||||
const cols = [
|
||||
{
|
||||
label: t('Status'),
|
||||
dataKey: 'status',
|
||||
|
|
@ -585,7 +690,7 @@ export const NetworkPanelComp = observer(
|
|||
});
|
||||
}
|
||||
return cols;
|
||||
}, [showSingleTab]);
|
||||
}, [showSingleTab, activeTab, t, getTabName, getTabNum, isSpot]);
|
||||
|
||||
return (
|
||||
<BottomBlock
|
||||
|
|
@ -617,7 +722,7 @@ export const NetworkPanelComp = observer(
|
|||
name="filter"
|
||||
onChange={onFilterChange}
|
||||
width={280}
|
||||
value={filter}
|
||||
value={inputFilterValue}
|
||||
size="small"
|
||||
prefix={<SearchOutlined className="text-neutral-400" />}
|
||||
/>
|
||||
|
|
@ -625,7 +730,7 @@ export const NetworkPanelComp = observer(
|
|||
</BottomBlock.Header>
|
||||
<BottomBlock.Content>
|
||||
<div className="flex items-center justify-between px-4 border-b bg-teal/5 h-8">
|
||||
<div>
|
||||
<div className="flex items-center">
|
||||
<Form.Item name="show-errors-only" className="mb-0">
|
||||
<label
|
||||
style={{
|
||||
|
|
@ -642,21 +747,29 @@ export const NetworkPanelComp = observer(
|
|||
<span className="text-sm ms-2">4xx-5xx Only</span>
|
||||
</label>
|
||||
</Form.Item>
|
||||
|
||||
{isProcessing && (
|
||||
<span className="text-xs text-gray-500 ml-4">
|
||||
Processing data...
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<InfoLine>
|
||||
<InfoLine.Point label={`${totalItems}`} value="requests" />
|
||||
<InfoLine.Point
|
||||
label={`${filteredList.length}`}
|
||||
value=" requests"
|
||||
label={`${displayedItems.length}/${totalItems}`}
|
||||
value="displayed"
|
||||
display={displayedItems.length < totalItems}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatBytes(transferredSize)}
|
||||
label={formatBytes(summaryStats.transferredSize)}
|
||||
value="transferred"
|
||||
display={transferredSize > 0}
|
||||
display={summaryStats.transferredSize > 0}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatBytes(resourcesSize)}
|
||||
label={formatBytes(summaryStats.resourcesSize)}
|
||||
value="resources"
|
||||
display={resourcesSize > 0}
|
||||
display={summaryStats.resourcesSize > 0}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatMs(domBuildingTime)}
|
||||
|
|
@ -679,6 +792,15 @@ export const NetworkPanelComp = observer(
|
|||
/>
|
||||
</InfoLine>
|
||||
</div>
|
||||
|
||||
{isLoading ? (
|
||||
<div className="flex items-center justify-center h-full">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-gray-900 mx-auto mb-2"></div>
|
||||
<p>Processing initial network data...</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<NoContent
|
||||
title={
|
||||
<div className="capitalize flex items-center gap-2">
|
||||
|
|
@ -687,20 +809,20 @@ export const NetworkPanelComp = observer(
|
|||
</div>
|
||||
}
|
||||
size="small"
|
||||
show={filteredList.length === 0}
|
||||
show={displayedItems.length === 0}
|
||||
>
|
||||
{/* @ts-ignore */}
|
||||
<div>
|
||||
<TimeTable
|
||||
rows={filteredList}
|
||||
tableHeight={panelHeight - 102}
|
||||
rows={displayedItems}
|
||||
tableHeight={panelHeight - 102 - (hasMoreItems ? 30 : 0)}
|
||||
referenceLines={referenceLines}
|
||||
renderPopup
|
||||
onRowClick={showDetailsModal}
|
||||
sortBy="time"
|
||||
sortAscending
|
||||
onJump={(row: any) => {
|
||||
onJump={(row) => {
|
||||
devTools.update(INDEX_KEY, {
|
||||
index: filteredList.indexOf(row),
|
||||
index: displayedItems.indexOf(row),
|
||||
});
|
||||
player.jump(row.time);
|
||||
}}
|
||||
|
|
@ -708,6 +830,21 @@ export const NetworkPanelComp = observer(
|
|||
>
|
||||
{tableCols}
|
||||
</TimeTable>
|
||||
|
||||
{hasMoreItems && (
|
||||
<div
|
||||
ref={loadingRef}
|
||||
className="flex justify-center items-center text-xs text-gray-500"
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-gray-600 mr-2"></div>
|
||||
Loading more data ({totalItems - displayedItems.length}{' '}
|
||||
remaining)
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{selectedWsChannel ? (
|
||||
<WSPanel
|
||||
socketMsgList={selectedWsChannel}
|
||||
|
|
@ -715,6 +852,7 @@ export const NetworkPanelComp = observer(
|
|||
/>
|
||||
) : null}
|
||||
</NoContent>
|
||||
)}
|
||||
</BottomBlock.Content>
|
||||
</BottomBlock>
|
||||
);
|
||||
|
|
@ -722,7 +860,6 @@ export const NetworkPanelComp = observer(
|
|||
);
|
||||
|
||||
const WebNetworkPanel = observer(NetworkPanelCont);
|
||||
|
||||
const MobileNetworkPanel = observer(MobileNetworkPanelCont);
|
||||
|
||||
export { WebNetworkPanel, MobileNetworkPanel };
|
||||
|
|
|
|||
frontend/app/components/shared/DevTools/NetworkPanel/utils.ts (new file, 178 lines)

@@ -0,0 +1,178 @@
export function mergeListsWithZoom<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(
  arr1: T[],
  arr2: Y[],
  arr3: Z[],
  zoom?: { enabled: boolean; start: number; end: number },
): Array<T | Y | Z> {
  // Early return for empty arrays
  if (arr1.length === 0 && arr2.length === 0 && arr3.length === 0) {
    return [];
  }

  // Optimized for common case - no zoom
  if (!zoom?.enabled) {
    return mergeThreeSortedArrays(arr1, arr2, arr3);
  }

  // Binary search for start indexes (faster than linear search for large arrays)
  const index1 = binarySearchStartIndex(arr1, zoom.start);
  const index2 = binarySearchStartIndex(arr2, zoom.start);
  const index3 = binarySearchStartIndex(arr3, zoom.start);

  // Merge arrays within zoom range
  return mergeThreeSortedArraysWithinRange(
    arr1,
    arr2,
    arr3,
    index1,
    index2,
    index3,
    zoom.start,
    zoom.end,
  );
}

function binarySearchStartIndex<T extends Record<string, any>>(
  arr: T[],
  threshold: number,
): number {
  if (arr.length === 0) return 0;

  let low = 0;
  let high = arr.length - 1;

  // Handle edge cases first for better performance
  if (arr[high].time < threshold) return arr.length;
  if (arr[low].time >= threshold) return 0;

  while (low <= high) {
    const mid = Math.floor((low + high) / 2);

    if (arr[mid].time < threshold) {
      low = mid + 1;
    } else {
      high = mid - 1;
    }
  }

  return low;
}

function mergeThreeSortedArrays<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(arr1: T[], arr2: Y[], arr3: Z[]): Array<T | Y | Z> {
  const totalLength = arr1.length + arr2.length + arr3.length;
  // prealloc array size
  const result = new Array(totalLength);

  let i = 0,
    j = 0,
    k = 0,
    index = 0;

  while (i < arr1.length || j < arr2.length || k < arr3.length) {
    const val1 = i < arr1.length ? arr1[i].time : Infinity;
    const val2 = j < arr2.length ? arr2[j].time : Infinity;
    const val3 = k < arr3.length ? arr3[k].time : Infinity;

    if (val1 <= val2 && val1 <= val3) {
      result[index++] = arr1[i++];
    } else if (val2 <= val1 && val2 <= val3) {
      result[index++] = arr2[j++];
    } else {
      result[index++] = arr3[k++];
    }
  }

  return result;
}

// same as above, just with zoom stuff
function mergeThreeSortedArraysWithinRange<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(
  arr1: T[],
  arr2: Y[],
  arr3: Z[],
  startIdx1: number,
  startIdx2: number,
  startIdx3: number,
  start: number,
  end: number,
): Array<T | Y | Z> {
  // we don't know beforehand how many items will be there
  const result = [];

  let i = startIdx1;
  let j = startIdx2;
  let k = startIdx3;

  while (i < arr1.length || j < arr2.length || k < arr3.length) {
    const val1 = i < arr1.length ? arr1[i].time : Infinity;
    const val2 = j < arr2.length ? arr2[j].time : Infinity;
    const val3 = k < arr3.length ? arr3[k].time : Infinity;

    // Early termination: if all remaining values exceed end time
    if (Math.min(val1, val2, val3) > end) {
      break;
    }

    if (val1 <= val2 && val1 <= val3) {
      if (val1 <= end) {
        result.push(arr1[i]);
      }
      i++;
    } else if (val2 <= val1 && val2 <= val3) {
      if (val2 <= end) {
        result.push(arr2[j]);
      }
      j++;
    } else {
      if (val3 <= end) {
        result.push(arr3[k]);
      }
      k++;
    }
  }

  return result;
}

export function processInChunks(
  items: any[],
  processFn: (item: any) => any,
  chunkSize = 1000,
  overscan = 0,
) {
  return new Promise((resolve) => {
    if (items.length === 0) {
      resolve([]);
      return;
    }

    let result: any[] = [];
    let index = 0;

    const processNextChunk = () => {
      const chunk = items.slice(index, index + chunkSize + overscan);
      result = result.concat(processFn(chunk));
      index += chunkSize;

      if (index < items.length) {
        setTimeout(processNextChunk, 0);
      } else {
        resolve(result);
      }
    };

    processNextChunk();
  });
}
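A short sketch of how the two exported helpers compose: processInChunks applies a filter over a large array in slices so the main thread is not blocked, and mergeListsWithZoom then merges the three time-sorted lists, optionally restricted to the zoom window. The TimedItem shape, the query argument and buildVisibleList are assumptions for the example, not names from the panel:

// Illustrative usage of the helpers above; arrays must be sorted by `time` for the merge.
type TimedItem = { time: number; name: string };

export async function buildVisibleList(
  resources: TimedItem[],
  fetches: TimedItem[],
  sockets: TimedItem[],
  query: string,
  zoom: { enabled: boolean; start: number; end: number },
): Promise<TimedItem[]> {
  // Filter in 2500-item slices, yielding to the event loop between slices.
  const matching = (await processInChunks(
    resources,
    (chunk: TimedItem[]) => chunk.filter((r) => r.name.includes(query)),
    2500,
  )) as TimedItem[];

  // Three-way merge of already-sorted lists, clipped to the zoom range when enabled.
  return mergeListsWithZoom(matching, fetches, sockets, zoom) as TimedItem[];
}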
|
|
@@ -18,7 +18,7 @@ function DocCard(props: Props) {
  } = props;

  return (
    <div className={cn('p-5 bg-gray-lightest mb-4 rounded', className)}>
    <div className={cn('p-5 bg-gray-lightest mb-4 rounded-lg', className)}>
      <div className="font-medium mb-2 flex items-center">
        {props.icon && (
          <div
|
|
|
|||
|
|
@@ -19,7 +19,7 @@ const AUTOREFRESH_INTERVAL = 2 * 60 * 1000;
const PER_PAGE = 10;

function LiveSessionList() {
  const { searchStoreLive, sessionStore, customFieldStore } = useStore();
  const { searchStoreLive, sessionStore, customFieldStore, projectsStore } = useStore();
  const filter = searchStoreLive.instance;
  const list = sessionStore.liveSessions;
  const { totalLiveSessions } = sessionStore;

@@ -72,6 +72,12 @@ function LiveSessionList() {
    void searchStoreLive.fetchSessions();
  };

  useEffect(() => {
    if (projectsStore.activeSiteId) {
      void searchStoreLive.fetchSessions(true);
    }
  }, [projectsStore.activeSiteId])

  const onUserClick = (userId: string, userAnonymousId: string) => {
    if (userId) {
      searchStoreLive.addFilterByKeyAndValue(FilterKey.USERID, userId);

@@ -98,7 +104,7 @@ function LiveSessionList() {
    <div>
      <div className="bg-white py-3 rounded-lg border shadow-sm">
        <div className="flex mb-4 pb-2 px-3 justify-between items-center border-b border-b-gray-lighter">
          <LiveSessionReloadButton onClick={refetch} />
          <LiveSessionReloadButton />
          <div className="flex items-center">
            <div className="flex items-center ml-6">
              <span className="mr-2 color-gray-medium">{t('Sort By')}</span>
|
|
|
|||
|
|
@@ -4,15 +4,11 @@ import { observer } from 'mobx-react-lite';
import ReloadButton from '../ReloadButton';
import { useTranslation } from 'react-i18next';

interface Props {
  onClick: () => void;
}

function LiveSessionReloadButton(props: Props) {
function LiveSessionReloadButton() {
  const { t } = useTranslation();
  const { sessionStore } = useStore();
  const { onClick } = props;
  const loading = sessionStore.loadingLiveSessions;
  const { searchStoreLive } = useStore();
  const onClick = searchStoreLive.fetchSessions
  const loading = searchStoreLive.loading;
  return (
    <ReloadButton label={t('Refresh')} buttonSize={'small'} iconSize={14} loading={loading} onClick={onClick} className="cursor-pointer" />
  );
|
|
|
|||
|
|
@@ -18,6 +18,7 @@ export default function ReloadButton(props: Props) {
    <Button
      type="default"
      size={buttonSize}
      loading={loading}
      onClick={onClick}
      icon={<SyncOutlined style={{ fontSize: iconSize }} />}
    >
|
|
|
|||
|
|
@ -1,8 +1,4 @@
|
|||
import { Input } from 'antd';
|
||||
import React from 'react';
|
||||
import { useStore } from 'App/mstore';
|
||||
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import NoSessionsMessage from 'Shared/NoSessionsMessage/NoSessionsMessage';
|
||||
import MainSearchBar from 'Shared/MainSearchBar/MainSearchBar';
|
||||
import usePageTitle from '@/hooks/usePageTitle';
|
||||
|
|
@ -13,22 +9,8 @@ import SessionHeader from './components/SessionHeader';
|
|||
import LatestSessionsMessage from './components/LatestSessionsMessage';
|
||||
|
||||
function SessionsTabOverview() {
|
||||
const [query, setQuery] = React.useState('');
|
||||
const { aiFiltersStore, searchStore } = useStore();
|
||||
const appliedFilter = searchStore.instance;
|
||||
usePageTitle('Sessions - OpenReplay');
|
||||
|
||||
const handleKeyDown = (event: any) => {
|
||||
if (event.key === 'Enter') {
|
||||
fetchResults();
|
||||
}
|
||||
};
|
||||
const fetchResults = () => {
|
||||
void aiFiltersStore.omniSearch(query, appliedFilter.toData());
|
||||
};
|
||||
|
||||
const testingKey =
|
||||
localStorage.getItem('__mauricio_testing_access') === 'true';
|
||||
return (
|
||||
<>
|
||||
<NoSessionsMessage />
|
||||
|
|
@ -36,15 +18,6 @@ function SessionsTabOverview() {
|
|||
<MainSearchBar />
|
||||
<div className="my-4" />
|
||||
<div className="widget-wrapper">
|
||||
{testingKey ? (
|
||||
<Input
|
||||
value={query}
|
||||
onKeyDown={handleKeyDown}
|
||||
onChange={(e) => setQuery(e.target.value)}
|
||||
className="mb-2"
|
||||
placeholder="ask session ai"
|
||||
/>
|
||||
) : null}
|
||||
<SessionHeader />
|
||||
<div className="border-b" />
|
||||
<LatestSessionsMessage />
|
||||
|
|
@ -59,4 +32,4 @@ export default withPermissions(
|
|||
'',
|
||||
false,
|
||||
false,
|
||||
)(observer(SessionsTabOverview));
|
||||
)(SessionsTabOverview);
|
||||
|
|
|
|||
|
|
@ -20,73 +20,13 @@ const tagIcons = {
|
|||
function SessionTags() {
|
||||
const { t } = useTranslation();
|
||||
const screens = useBreakpoint();
|
||||
const { projectsStore, sessionStore, searchStore } = useStore();
|
||||
const total = sessionStore.total;
|
||||
const { projectsStore, searchStore } = useStore();
|
||||
const platform = projectsStore.active?.platform || '';
|
||||
const activeTab = searchStore.activeTags;
|
||||
const [isMobile, setIsMobile] = useState(false);
|
||||
const [isDropdownOpen, setIsDropdownOpen] = useState(false);
|
||||
const dropdownRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
const filteredOptions = issues_types
|
||||
.filter(
|
||||
(tag) =>
|
||||
tag.type !== 'mouse_thrashing' &&
|
||||
(platform === 'web'
|
||||
? tag.type !== types.TAP_RAGE
|
||||
: tag.type !== types.CLICK_RAGE),
|
||||
)
|
||||
.map((tag) => ({
|
||||
value: tag.type,
|
||||
icon: tagIcons[tag.type],
|
||||
label: t(tag.name),
|
||||
}));
|
||||
|
||||
// Find the currently active option
|
||||
const activeOption =
|
||||
filteredOptions.find((option) => option.value === activeTab[0]) ||
|
||||
filteredOptions[0];
|
||||
|
||||
// Check if on mobile
|
||||
useEffect(() => {
|
||||
const checkIfMobile = () => {
|
||||
setIsMobile(window.innerWidth < 768);
|
||||
};
|
||||
|
||||
checkIfMobile();
|
||||
window.addEventListener('resize', checkIfMobile);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkIfMobile);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Close dropdown when clicking outside
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (
|
||||
dropdownRef.current &&
|
||||
!(dropdownRef.current as HTMLElement).contains(event.target as Node)
|
||||
) {
|
||||
setIsDropdownOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
return () => {
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Handler for dropdown item selection
|
||||
const handleSelectOption = (value: string) => {
|
||||
searchStore.toggleTag(value as any);
|
||||
setIsDropdownOpen(false);
|
||||
};
|
||||
|
||||
if (total === 0 && (activeTab.length === 0 || activeTab[0] === 'all')) {
|
||||
return null;
|
||||
}
|
||||
React.useEffect(() => {
|
||||
searchStore.toggleTag(types.ALL);
|
||||
}, [projectsStore.activeSiteId])
|
||||
|
||||
return (
|
||||
<div className="flex items-center">
|
||||
|
|
|
|||
|
|
@@ -5,17 +5,18 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';

const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';

const tracker = new Tracker({
tracker.configure({
  projectKey: "PROJECT_KEY",
  ingestPoint: "https://${window.location.hostname}/ingest",
});

tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

const tracker = new Tracker({
tracker.configure({
  projectKey: "PROJECT_KEY",
  ingestPoint: "https://${window.location.hostname}/ingest",
});
|
|
|
|||
|
|
@@ -31,7 +31,7 @@ const Input = React.forwardRef((props: Props, ref: any) => {
      {icon && (
        <Icon
          name={icon}
          className="absolute top-0 bottom-0 my-auto ml-4"
          className="absolute top-0 bottom-0 my-auto ml-4 z-10"
          size="14"
        />
      )}
|
|
|
|||
frontend/app/layout/LangBanner.tsx (new file, 30 lines)

@@ -0,0 +1,30 @@
import React from 'react'
import {
  Languages, X, Info
} from 'lucide-react'
import { Button } from 'antd';
import { useHistory } from "react-router-dom";
import { client } from 'App/routes'

function LangBanner({ onClose }: { onClose: () => void }) {
  const history = useHistory()

  const onClick = () => {
    history.push(client('account'))
  }
  return (
    <div className={'px-4 py-2 bg-yellow flex items-center w-screen gap-2'}>
      <Info size={16} />
      <div>
        OpenReplay now supports French, Russian, Chinese, and Spanish 🎉. Update your language in settings.
      </div>
      <div className={'ml-auto'} />
      <Button icon={<Languages size={14} />} size={'small'} onClick={onClick}>
        Change Language
      </Button>
      <Button icon={<X size={16} />} type={'text'} shape={'circle'} onClick={onClose} size={'small'} />
    </div>
  )
}

export default LangBanner
|
@ -1,6 +1,7 @@
|
|||
import { Layout, Space, Tooltip } from 'antd';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import React, { useEffect } from 'react';
|
||||
import LangBanner from './LangBanner';
|
||||
|
||||
import { INDEXES } from 'App/constants/zindex';
|
||||
import Logo from 'App/layout/Logo';
|
||||
|
|
@ -11,14 +12,27 @@ import { useTranslation } from 'react-i18next';
|
|||
|
||||
const { Header } = Layout;
|
||||
|
||||
const langBannerClosedKey = '__or__langBannerClosed';
|
||||
const getLangBannerClosed = () => localStorage.getItem(langBannerClosedKey) === '1'
|
||||
function TopHeader() {
|
||||
const { userStore, notificationStore, projectsStore, settingsStore } =
|
||||
useStore();
|
||||
const { account } = userStore;
|
||||
const { siteId } = projectsStore;
|
||||
const { initialDataFetched } = userStore;
|
||||
const [langBannerClosed, setLangBannerClosed] = React.useState(getLangBannerClosed);
|
||||
const { t } = useTranslation();
|
||||
|
||||
React.useEffect(() => {
|
||||
const langBannerVal = localStorage.getItem(langBannerClosedKey);
|
||||
if (langBannerVal === null) {
|
||||
localStorage.setItem(langBannerClosedKey, '0')
|
||||
}
|
||||
if (langBannerVal === '0') {
|
||||
localStorage.setItem(langBannerClosedKey, '1')
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (!account.id || initialDataFetched) return;
|
||||
Promise.all([
|
||||
|
|
@ -29,7 +43,13 @@ function TopHeader() {
|
|||
});
|
||||
}, [account]);
|
||||
|
||||
const closeLangBanner = () => {
|
||||
setLangBannerClosed(true);
|
||||
localStorage.setItem(langBannerClosedKey, '1');
|
||||
}
|
||||
return (
|
||||
<>
|
||||
{langBannerClosed ? null : <LangBanner onClose={closeLangBanner} />}
|
||||
<Header
|
||||
style={{
|
||||
position: 'sticky',
|
||||
|
|
@ -74,6 +94,7 @@ function TopHeader() {
|
|||
|
||||
<TopRight />
|
||||
</Header>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -11,18 +11,13 @@ import ProjectDropdown from 'Shared/ProjectDropdown';
|
|||
import { useStore } from 'App/mstore';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
|
||||
interface Props {
|
||||
account: any;
|
||||
spotOnly?: boolean;
|
||||
}
|
||||
|
||||
function TopRight(props: Props) {
|
||||
function TopRight() {
|
||||
const { userStore } = useStore();
|
||||
const spotOnly = userStore.scopeState === 1;
|
||||
const { account } = userStore;
|
||||
return (
|
||||
<Space style={{ lineHeight: '0' }}>
|
||||
{props.spotOnly ? null : (
|
||||
{spotOnly ? null : (
|
||||
<>
|
||||
<ProjectDropdown />
|
||||
<GettingStartedProgress />
|
||||
|
|
@ -30,7 +25,6 @@ function TopRight(props: Props) {
|
|||
<Notifications />
|
||||
|
||||
{account.name ? <HealthStatus /> : null}
|
||||
<LanguageSwitcher />
|
||||
</>
|
||||
)}
|
||||
|
||||
|
|
|
|||
|
|
@@ -1498,5 +1498,8 @@
  "More attribute": "More attribute",
  "More attributes": "More attributes",
  "Account settings updated successfully": "Account settings updated successfully",
  "Include rage clicks": "Include rage clicks"
  "Include rage clicks": "Include rage clicks",
  "Interface Language": "Interface Language",
  "Select the language in which OpenReplay will appear.": "Select the language in which OpenReplay will appear.",
  "Language": "Language"
}

@@ -1498,5 +1498,8 @@
  "More attribute": "Más atributos",
  "More attributes": "Más atributos",
  "Account settings updated successfully": "Configuración de la cuenta actualizada correctamente",
  "Include rage clicks": "Incluir clics de ira"
  "Include rage clicks": "Incluir clics de ira",
  "Interface Language": "Idioma de la interfaz",
  "Select the language in which OpenReplay will appear.": "Selecciona el idioma en el que aparecerá OpenReplay.",
  "Language": "Idioma"
}

@@ -1498,5 +1498,8 @@
  "More attribute": "Plus d'attributs",
  "More attributes": "Plus d'attributs",
  "Account settings updated successfully": "Paramètres du compte mis à jour avec succès",
  "Include rage clicks": "Inclure les clics de rage"
  "Include rage clicks": "Inclure les clics de rage",
  "Interface Language": "Langue de l'interface",
  "Select the language in which OpenReplay will appear.": "Sélectionnez la langue dans laquelle OpenReplay apparaîtra.",
  "Language": "Langue"
}

@@ -1498,5 +1498,8 @@
  "More attribute": "Еще атрибут",
  "More attributes": "Еще атрибуты",
  "Account settings updated successfully": "Настройки аккаунта успешно обновлены",
  "Include rage clicks": "Включить невыносимые клики"
  "Include rage clicks": "Включить невыносимые клики",
  "Interface Language": "Язык интерфейса",
  "Select the language in which OpenReplay will appear.": "Выберите язык, на котором будет отображаться OpenReplay.",
  "Language": "Язык"
}

@@ -1498,5 +1498,8 @@
  "More attributes": "更多属性",
  "More attribute": "更多属性",
  "Account settings updated successfully": "帐户设置已成功更新",
  "Include rage clicks": "包括点击狂怒"
  "Include rage clicks": "包括点击狂怒",
  "Interface Language": "界面语言",
  "Select the language in which OpenReplay will appear.": "选择 OpenReplay 将显示的语言。",
  "Language": "语言"
}
|
|||
|
|
@@ -254,7 +254,7 @@ class SearchStore

    this.savedSearch = new SavedSearch({});
    sessionStore.clearList();
    void this.fetchSessions(true);
    // void this.fetchSessions(true);
  }

  async checkForLatestSessionCount(): Promise<void> {
@@ -75,6 +75,8 @@ class SearchStoreLive

  loadingFilterSearch = false;

  loading = false;

  constructor() {
    makeAutoObservable(this);

@@ -242,11 +244,25 @@ class SearchStoreLive {
    });
  };

  async fetchSessions() {
  setLoading = (val: boolean) => {
    this.loading = val;
  }

  fetchSessions = async (force?: boolean) => {
    if (!force && this.loading) {
      return;
    }
    this.setLoading(true)
    try {
      await sessionStore.fetchLiveSessions({
        ...this.instance.toSearch(),
        page: this.currentPage,
      });
    } catch (e) {
      console.error('Error fetching sessions:', e);
    } finally {
      this.setLoading(false)
    }
  }
}
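The reworked fetchSessions above guards against overlapping requests via the loading flag, and the force argument bypasses that guard. A quick usage sketch (the searchStoreLive instance name is illustrative, not from this diff):

// Sketch only — the store instance name is an assumption.
await searchStoreLive.fetchSessions();     // skipped if a fetch is already in flight
await searchStoreLive.fetchSessions(true); // always refetches, e.g. after filters change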
@@ -114,7 +114,9 @@ class UserStore {
  get isEnterprise() {
    return (
      this.account?.edition === 'ee' ||
      this.authStore.authDetails?.edition === 'ee'
      this.account?.edition === 'msaas' ||
      this.authStore.authDetails?.edition === 'ee' ||
      this.authStore.authDetails?.edition === 'msaas'
    );
  }

@@ -238,18 +240,7 @@ class UserStore {
        resolve(response);
      })
      .catch(async (e) => {
        const err = await e.response?.json();
        runInAction(() => {
          this.saving = false;
        });
        const errStr = err.errors[0]
          ? err.errors[0].includes('already exists')
            ? this.t(
                "This email is already linked to an account or team on OpenReplay and can't be used again.",
              )
            : err.errors[0]
          : this.t('Error saving user');
        toast.error(errStr);
        toast.error(e.message || this.t("Failed to save user's data."));
        reject(e);
      })
      .finally(() => {

@@ -389,15 +380,16 @@ class UserStore {
        this.signUpRequest = { loading: false, errors: [] };
      });
    } catch (error) {
      const inUse = error.message.includes('already in use');
      const inUseMsg = this.t('An account with this email already exists. Please log in or use a different email address.')
      const genericMsg = this.t('Error signing up; please check your data and try again')
      runInAction(() => {
        this.signUpRequest = {
          loading: false,
          errors: error.response?.errors || [],
        };
      });
      toast.error(
        this.t('Error signing up; please check your data and try again'),
      );
      toast.error(inUse ? inUseMsg : genericMsg);
    } finally {
      runInAction(() => {
        this.signUpRequest.loading = false;

@@ -416,9 +408,9 @@ class UserStore {
        this.jwt = data.jwt;
        this.spotJwt = data.spotJwt;
      });
    } catch (error) {
      toast.error(this.t('Error resetting your password; please try again'));
      return error.response;
    } catch (e) {
      toast.error(e.message || this.t('Error resetting your password; please try again'));
      throw e;
    } finally {
      runInAction(() => {
        this.loading = false;

@@ -663,14 +655,14 @@ class AuthStore {
        {
          key: 'authDetails',
          serialize: (ad) => {
            delete ad.edition;
            // delete ad.edition;
            return Object.keys(ad).length > 0
              ? JSON.stringify(ad)
              : JSON.stringify({});
          },
          deserialize: (json) => {
            const ad = JSON.parse(json);
            delete ad.edition;
            // delete ad.edition;
            return ad;
          },
        },
@@ -150,7 +150,7 @@ export default class MessageLoader {
    });

    const sortedMsgs = msgs
      // .sort((m1, m2) => m1.time - m2.time);
      // .sort((m1, m2) => m1.time - m2.time)
      .sort(brokenDomSorter)
      .sort(sortIframes);

@@ -383,7 +383,6 @@ const DOMMessages = [
  MType.CreateElementNode,
  MType.CreateTextNode,
  MType.MoveNode,
  MType.RemoveNode,
  MType.CreateIFrameDocument,
];

@@ -395,6 +394,11 @@ function brokenDomSorter(m1: PlayerMsg, m2: PlayerMsg) {
  if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
    return 1;

  if (m1.tp === MType.RemoveNode)
    return 1;
  if (m2.tp === MType.RemoveNode)
    return -1;

  const m1IsDOM = DOMMessages.includes(m1.tp);
  const m2IsDOM = DOMMessages.includes(m2.tp);
  if (m1IsDOM && m2IsDOM) {
@@ -190,6 +190,9 @@ export default class AssistManager {
      auth: {
        token: agentToken,
      },
      extraHeaders: {
        sessionId: this.session.sessionId,
      },
      query: {
        peerId: this.peerID,
        projectId,
@@ -185,8 +185,7 @@ export default class Call {
    pc.ontrack = (event) => {
      const stream = event.streams[0];
      if (stream && !this.videoStreams[remotePeerId]) {
        const clonnedStream = stream.clone();
        this.videoStreams[remotePeerId] = clonnedStream.getVideoTracks()[0];
        this.videoStreams[remotePeerId] = stream.getVideoTracks()[0];
        if (this.store.get().calling !== CallingState.OnCall) {
          this.store.update({ calling: CallingState.OnCall });
        }

@@ -305,7 +304,6 @@ export default class Call {
    }
    try {
      // if the connection is not established yet, then set remoteDescription to peer
      if (!pc.localDescription) {
        await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
        const answer = await pc.createAnswer();
        await pc.setLocalDescription(answer);

@@ -319,9 +317,6 @@ export default class Call {
        } else {
          this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
        }
      } else {
        logger.warn('Skipping setRemoteDescription: Already in stable state');
      }
    } catch (e) {
      logger.error('Error setting remote description from answer', e);
      this.callArgs?.onError?.(e);

@@ -388,13 +383,13 @@ export default class Call {
  private handleCallEnd() {
    // If the call is not completed, then call onCallEnd
    if (this.store.get().calling !== CallingState.NoCall) {
      this.callArgs && this.callArgs.onCallEnd();
      this.callArgs && this.callArgs.onRemoteCallEnd();
    }
    // change state to NoCall
    this.store.update({ calling: CallingState.NoCall });
    // Close all created RTCPeerConnection
    Object.values(this.connections).forEach((pc) => pc.close());
    this.callArgs?.onCallEnd();
    this.callArgs?.onRemoteCallEnd();
    // Clear connections
    this.connections = {};
    this.callArgs = null;

@@ -414,7 +409,7 @@ export default class Call {
      // Close all connections and reset callArgs
      Object.values(this.connections).forEach((pc) => pc.close());
      this.connections = {};
      this.callArgs?.onCallEnd();
      this.callArgs?.onRemoteCallEnd();
      this.store.update({ calling: CallingState.NoCall });
      this.callArgs = null;
    } else {

@@ -443,7 +438,8 @@ export default class Call {
  private callArgs: {
    localStream: LocalStream;
    onStream: (s: MediaStream, isAgent: boolean) => void;
    onCallEnd: () => void;
    onRemoteCallEnd: () => void;
    onLocalCallEnd: () => void;
    onReject: () => void;
    onError?: (arg?: any) => void;
  } | null = null;

@@ -451,14 +447,16 @@ export default class Call {
  setCallArgs(
    localStream: LocalStream,
    onStream: (s: MediaStream, isAgent: boolean) => void,
    onCallEnd: () => void,
    onRemoteCallEnd: () => void,
    onLocalCallEnd: () => void,
    onReject: () => void,
    onError?: (e?: any) => void,
  ) {
    this.callArgs = {
      localStream,
      onStream,
      onCallEnd,
      onRemoteCallEnd,
      onLocalCallEnd,
      onReject,
      onError,
    };

@@ -549,7 +547,7 @@ export default class Call {
      void this.initiateCallEnd();
      Object.values(this.connections).forEach((pc) => pc.close());
      this.connections = {};
      this.callArgs?.onCallEnd();
      this.callArgs?.onLocalCallEnd();
    }
  }
@@ -138,26 +138,9 @@ export default class UserService {
  }

  async resetPassword(data: any) {
    try {
      const response = await this.client.post('/password/reset', data);
      const responseData = await response.json();
      if (responseData.errors) {
        throw new Error(
          responseData.errors[0] || 'An unexpected error occurred.',
        );
      }

      return responseData || {};
    } catch (error: any) {
      if (error.response) {
        const errorData = await error.response.json();
        const errorMessage = errorData.errors
          ? errorData.errors[0]
          : 'An unexpected error occurred.';
        throw new Error(errorMessage);
      }
      throw new Error('An unexpected error occurred.');
    }
  }

  async requestResetPassword(data: any) {
@@ -29,6 +29,15 @@ export function debounce(callback, wait, context = this) {
  };
}

export function debounceCall(func, wait) {
  let timeout;
  return function (...args) {
    const context = this;
    clearTimeout(timeout);
    timeout = setTimeout(() => func.apply(context, args), wait);
  };
}

export function randomInt(a, b) {
  const min = (b ? a : 0) - 0.5;
  const max = b || a || Number.MAX_SAFE_INTEGER;
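The new debounceCall helper added above collapses a burst of calls into a single invocation after wait milliseconds of quiet. A small usage sketch (the import path and handler are illustrative, assuming the repo's usual App/ path alias):

// Sketch only — import path and handler name are assumptions.
import { debounceCall } from 'App/utils';

const onQueryChange = debounceCall((query: string) => {
  console.log('searching for', query);
}, 300);

onQueryChange('op');
onQueryChange('openreplay'); // only this call runs, ~300 ms after the last keystroke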
@@ -597,8 +606,7 @@ function saveAsFile(blob: Blob, filename: string) {
}

export function exportAntCsv(tableColumns, tableData, filename = 'table.csv') {
  console.log(tableColumns, tableData);
  const headers = tableColumns.map((col) => col.title).join(',');
  const headers = tableColumns.map((col) => col._pureTitle).join(',');
  const rows = tableData.map((row) =>
    tableColumns
      .map((col) => {
219 frontend/app/withRecaptcha.tsx Normal file

@@ -0,0 +1,219 @@
import React, { useState, useRef, ComponentType, ReactNode, useCallback, useEffect, useLayoutEffect } from 'react';
import ReCAPTCHA from 'react-google-recaptcha';
import { toast } from "react-toastify";

// Define a more specific type for submission data
export interface SubmissionData {
  [key: string]: any;
}

export interface WithCaptchaProps {
  submitWithCaptcha: (data: SubmissionData) => Promise<any>;
  hasCaptchaError: boolean;
  isVerifyingCaptcha: boolean;
  resetCaptcha: () => void;
}

export interface WithCaptchaOptions {
  position?: 'visible' | 'hidden';
  errorMessage?: string;
  theme?: 'light' | 'dark';
  size?: 'normal' | 'compact' | 'invisible';
}

// Safely get environment variables with fallbacks
const getCaptchaConfig = () => {
  const enabled = typeof window !== 'undefined' &&
    window.env?.CAPTCHA_ENABLED === 'true';

  const siteKey = typeof window !== 'undefined' ?
    window.env?.CAPTCHA_SITE_KEY || '' : '';

  return { enabled, siteKey };
};

/**
 * Higher-Order Component that adds reCAPTCHA functionality to a form component
 *
 * @param WrappedComponent The component to wrap with CAPTCHA functionality
 * @param options Configuration options for the CAPTCHA behavior
 * @returns A new component with CAPTCHA capabilities
 */
const withCaptcha = <P extends object>(
  WrappedComponent: ComponentType<P & WithCaptchaProps>,
  options: WithCaptchaOptions = {}
): React.FC<P> => {
  // Default options
  const {
    position = 'hidden',
    errorMessage = 'Please complete the CAPTCHA verification',
    theme = 'light',
    size = 'invisible'
  } = options;

  const WithCaptchaComponent: React.FC<P> = (props: P) => {
    const { enabled: CAPTCHA_ENABLED, siteKey: CAPTCHA_SITE_KEY } = getCaptchaConfig();
    const [captchaToken, setCaptchaToken] = useState<string | null>(null);
    const [isVerifyingCaptcha, setIsVerifyingCaptcha] = useState<boolean>(false);
    const [tokenExpired, setTokenExpired] = useState<boolean>(false);
    const recaptchaRef = useRef<ReCAPTCHA>(null);

    // Reset token when expired
    useEffect(() => {
      if (tokenExpired) {
        setCaptchaToken(null);
        setTokenExpired(false);
      }
    }, [tokenExpired]);

    // Handle token expiration
    const onCaptchaExpired = useCallback(() => {
      setTokenExpired(true);
      if (CAPTCHA_ENABLED) {
        toast.warning('CAPTCHA verification expired. Please verify again.');
      }
    }, [CAPTCHA_ENABLED]);

    // Handle token change
    let onCaptchaChange = (token: string | null) => {
      console.log('Standard captcha callback received token:', !!token);
      setCaptchaToken(token);
      setTokenExpired(false);
    };

    // Reset captcha manually
    const resetCaptcha = useCallback(() => {
      recaptchaRef.current?.reset();
      setCaptchaToken(null);
    }, []);

    // Submit with captcha verification
    const submitWithCaptcha = useCallback(
      (data: SubmissionData): Promise<any> => {
        return new Promise((resolve, reject) => {
          if (!CAPTCHA_ENABLED) {
            // CAPTCHA not enabled, resolve with original data
            resolve(data);
            return;
          }

          setIsVerifyingCaptcha(true);

          // Special handling for invisible reCAPTCHA
          if (size === 'invisible') {
            // Create a direct token handler function
            const handleToken = (receivedToken: string | null) => {
              console.log('reCAPTCHA token received:', !!receivedToken);

              if (receivedToken) {
                // We have a token, resolve the promise
                const dataWithCaptcha = {
                  ...data,
                  'g-recaptcha-response': receivedToken
                };

                resolve(dataWithCaptcha);

                // Reset for next use
                setTimeout(() => {
                  recaptchaRef.current?.reset();
                  setIsVerifyingCaptcha(false);
                }, 100);
              }
            };

            // Set up a callback directly on the reCAPTCHA ref
            if (recaptchaRef.current) {
              console.log('Executing invisible reCAPTCHA');

              // Execute the reCAPTCHA challenge
              recaptchaRef.current.executeAsync()
                .then((token: string | null) => {
                  handleToken(token);
                })
                .catch((error: any) => {
                  console.error('reCAPTCHA execution failed:', error);
                  setIsVerifyingCaptcha(false);
                  reject(new Error('CAPTCHA verification failed'));
                });

              // Set a timeout in case the promise doesn't resolve
              setTimeout(() => {
                if (isVerifyingCaptcha) {
                  console.log('reCAPTCHA verification timed out');
                  setIsVerifyingCaptcha(false);
                  toast.error(errorMessage || 'Verification timed out. Please try again.');
                  reject(new Error('CAPTCHA verification timeout'));
                }
              }, 5000);
            } else {
              console.error('reCAPTCHA ref not available');
              setIsVerifyingCaptcha(false);
              reject(new Error('CAPTCHA component not initialized'));
            }
          } else if (captchaToken) {
            // Standard reCAPTCHA with token already available
            const dataWithCaptcha = {
              ...data,
              'g-recaptcha-response': captchaToken
            };

            resolve(dataWithCaptcha);
            recaptchaRef.current?.reset();
            setCaptchaToken(null);
            setIsVerifyingCaptcha(false);
          } else {
            // Standard reCAPTCHA but no token yet
            toast.error(errorMessage || 'Please complete the CAPTCHA verification');
            reject(new Error('CAPTCHA verification required'));
            setIsVerifyingCaptcha(false);
          }
        });
      },
      [CAPTCHA_ENABLED, captchaToken, errorMessage, size, isVerifyingCaptcha]
    );

    const hasCaptchaError = !captchaToken && CAPTCHA_ENABLED === true;

    return (
      <>
        {CAPTCHA_ENABLED && (
          <div className={position === 'hidden' ? 'sr-only' : 'mb-4'}>
            <ReCAPTCHA
              ref={recaptchaRef}
              sitekey={CAPTCHA_SITE_KEY}
              onChange={onCaptchaChange}
              onExpired={onCaptchaExpired}
              theme={theme}
              size={size}
            />
            {hasCaptchaError && (
              <div className="text-red-500 text-sm mt-1">
                {errorMessage}
              </div>
            )}
          </div>
        )}
        <WrappedComponent
          {...props}
          submitWithCaptcha={submitWithCaptcha}
          hasCaptchaError={hasCaptchaError}
          isVerifyingCaptcha={isVerifyingCaptcha}
          resetCaptcha={resetCaptcha}
        />
      </>
    );
  };

  // Display name for debugging
  const wrappedComponentName =
    WrappedComponent.displayName ||
    WrappedComponent.name ||
    'Component';

  WithCaptchaComponent.displayName = `WithCaptcha(${wrappedComponentName})`;

  return WithCaptchaComponent;
};

export default withCaptcha;
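A minimal usage sketch for the new HOC (the LoginForm component, its payload, and the 'App/withRecaptcha' import path are hypothetical, assuming the repo's usual path alias; they are not part of this diff):

// Sketch only — component name, import path, and payload are assumptions.
import React from 'react';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

function LoginForm({ submitWithCaptcha, isVerifyingCaptcha }: WithCaptchaProps) {
  const onSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    // Resolves with the original data plus a 'g-recaptcha-response' field when CAPTCHA is enabled.
    const payload = await submitWithCaptcha({ email: 'user@example.com' });
    console.log('ready to send to the API:', payload);
  };

  return (
    <form onSubmit={onSubmit}>
      <button type="submit" disabled={isVerifyingCaptcha}>Sign in</button>
    </form>
  );
}

export default withCaptcha(LoginForm, { size: 'invisible' });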
@@ -10,7 +10,27 @@ metadata:
    {{- include "assist.labels" . | nindent 4 }}
  annotations:
    nginx.ingress.kubernetes.io/rewrite-target: /$1
    nginx.ingress.kubernetes.io/upstream-hash-by: $http_x_forwarded_for
    nginx.ingress.kubernetes.io/configuration-snippet: |
      #set $sticky_used "no";
      #if ($sessionid != "") {
      # set $sticky_used "yes";
      #}

      #add_header X-Debug-Session-ID $sessionid;
      #add_header X-Debug-Session-Type "wss";
      #add_header X-Sticky-Session-Used $sticky_used;
      #add_header X-Upstream-Server $upstream_addr;

      proxy_hide_header access-control-allow-headers;
      proxy_hide_header Access-Control-Allow-Origin;
      add_header 'Access-Control-Allow-Origin' $http_origin always;
      add_header 'Access-Control-Allow-Methods' 'GET, OPTIONS' always;
      add_header 'Access-Control-Allow-Headers' 'sessionid, Content-Type, Authorization' always;
      add_header 'Access-Control-Max-Age' 1728000;
      add_header 'Content-Type' 'text/plain charset=UTF-8';

    nginx.ingress.kubernetes.io/upstream-hash-by: $session_id

  {{- with .Values.ingress.annotations }}
  {{- toYaml . | nindent 4 }}
  {{- end }}
Some files were not shown because too many files have changed in this diff.