Compare commits


25 commits

Author SHA1 Message Date
Andrey Babushkin
5f9ed3a087 combine in 1 line 2025-03-21 15:18:10 +01:00
nick-delirium
1e2dde09b4
ui: onboarding fixes 2025-03-21 10:43:51 +01:00
nick-delirium
3cdfe76134
ui: add sessionId header for AssistManager.ts 2025-03-21 10:18:33 +01:00
nick-delirium
39855651d5
ui: use polling for first request 2025-03-21 09:52:00 +01:00
Taha Yassine Kraiem
dd469d2349 refactor(chalice): initial product analytics 2025-03-20 17:13:17 +01:00
Taha Yassine Kraiem
3d448320bf refactor(DB): changed DB structure for product analytics 2025-03-20 17:13:17 +01:00
Taha Yassine Kraiem
7b0771a581 refactor(chalice): upgraded dependencies 2025-03-20 17:13:17 +01:00
Taha Yassine Kraiem
988b396223 refactor(chalice): moved CH sessions-search to FOSS
refactor(DB): changed DB structures for CH sessions-search in FOSS
refactor(DB): preparing for v1.23.0
2025-03-20 17:13:17 +01:00
nick-delirium
fa3b585785
ui: fix table column export 2025-03-20 16:06:48 +01:00
Alexander
91e0ebeb56 feat(assist): improved caching mechanism for cluster mode 2025-03-20 13:52:14 +01:00
rjshrjndrn
8e68eb9a20 feat(assist): enhance WebSocket session persistence
Add session extraction from peerId parameter for better WebSocket
connection stability. This improves assist session routing by:

- Extracting sessionID from peerId parameter using regex
- Setting upstream hash-by to use the extracted session ID
- Adding debug headers to monitor session routing

TODO: Convert this to map

Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
2025-03-20 12:38:36 +01:00
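[Editor's note] The proxy-side change itself isn't shown in this comparison, but the idea is easy to sketch: pull a stable session identifier out of the WebSocket peerId and hash upstream selection on it. A minimal Python illustration (the peerId layout and helper name are assumptions for the sketch, not taken from the commit):

    import re

    # Assumed peerId layout: "<projectKey>-<sessionId>"
    PEER_ID_RE = re.compile(r"^(?P<project_key>[^-]+)-(?P<session_id>\d+)$")

    def extract_session_id(peer_id: str):
        """Return the sessionId part of a peerId, or None if it doesn't match."""
        m = PEER_ID_RE.match(peer_id)
        return m.group("session_id") if m else None

Hashing on the extracted session ID means reconnects for the same session keep landing on the same upstream node, which is what makes the WebSocket routing stable.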
nick-delirium
13bd3d9121
tracker: add sessId header for assist polling 2025-03-20 12:13:40 +01:00
nick-delirium
048ae0913c
ui: refetch live session list on proj change 2025-03-19 17:36:33 +01:00
Shekar Siri
73fff8b817 feat(auth): support msaas edition for enterprise features
Add msaas to the isEnterprise check alongside ee edition to properly
display enterprise features. Use userStore.isEnterprise in SSOLogin
component instead of directly checking authDetails.edition for
consistent enterprise status detection.
2025-03-19 14:40:05 +01:00
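[Editor's note] The described check is a one-liner; sketched here in Python for illustration (the real change lives in the frontend user store, so names are approximate):

    ENTERPRISE_EDITIONS = {"ee", "msaas"}

    def is_enterprise(edition: str) -> bool:
        # msaas now counts as enterprise alongside the self-hosted ee edition
        return edition in ENTERPRISE_EDITIONS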
Shekar Siri
605fa96a34
feat(auth): implement withCaptcha HOC for consistent reCAPTCHA (#3175)
* refactor(searchStore): reformat filterMap function parameters (#3166)

- Reformat the parameters of the filterMap function for better readability.
- Comment out the fetchSessions call in clearSearch method to avoid unnecessary session fetch.

* Increment frontend chart version (#3167)

Co-authored-by: GitHub Action <action@github.com>

* refactor(chalice): cleaned code
fix(chalice): fixed session-search-pg sortKey issue
fix(chalice): fixed CH-query-formatter to handle special chars
fix(chalice): fixed /ids response

* feat(auth): implement withCaptcha HOC for consistent reCAPTCHA

This commit refactors the reCAPTCHA implementation across the application
by introducing a Higher Order Component (withCaptcha) that encapsulates
captcha verification logic. The changes:

- Create a reusable withCaptcha HOC in withRecaptcha.tsx
- Refactor Login, ResetPasswordRequest, and CreatePassword components
- Extract SSOLogin into a separate component
- Improve error handling and user feedback
- Standardize loading and verification states across forms
- Make captcha implementation more maintainable and consistent

---------

Co-authored-by: Mehdi Osman <estradino@users.noreply.github.com>
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Taha Yassine Kraiem <tahayk2@gmail.com>
2025-03-19 11:37:50 +01:00
Andrey Babushkin
2cb33d7894
change sort events logic (#3174) 2025-03-18 18:27:48 +01:00
nick-delirium
15d427418d
tracker: fix autogen version 2025-03-18 16:37:09 +01:00
nick-delirium
ed3e553726
tracker: assist 11.0.1 changelog 2025-03-18 16:36:10 +01:00
nick-delirium
7eace68de6
ui: add loading state for LiveSessionReloadButton.tsx 2025-03-18 15:30:24 +01:00
Taha Yassine Kraiem
8009882cef refactor(chalice): cleaned code
fix(chalice): fixed session-search-pg sortKey issue
fix(chalice): fixed CH-query-formatter to handle special chars
fix(chalice): fixed /ids response

(cherry picked from commit b505645782)
2025-03-18 13:52:56 +01:00
Andrey Babushkin
7365d8639c
updated widget link (#3158)
* updated widget link

* fix calls

* updated widget url
2025-03-18 11:07:09 +01:00
nick-delirium
4c967d4bc1
ui: update tracker import examples 2025-03-17 13:42:34 +01:00
Alexander
3fdf799bd7 feat(http): unsupported tracker error with projectID in logs 2025-03-17 13:32:00 +01:00
nick-delirium
9aca716e6b
tracker: 16.0.2 fix str dictionary keys 2025-03-17 11:25:54 +01:00
Shekar Siri
cf9ecdc9a4 refactor(searchStore): reformat filterMap function parameters
- Reformat the parameters of the filterMap function for better readability.
- Comment out the fetchSessions call in clearSearch method to avoid unnecessary session fetch.
2025-03-14 19:47:42 +01:00
95 changed files with 1522 additions and 1509 deletions

View file

@@ -6,16 +6,15 @@ name = "pypi"
 [packages]
 urllib3 = "==2.3.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.37.16"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
 clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+elasticsearch = "==8.17.2"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
-fastapi = "==0.115.8"
+cachetools = "==5.5.2"
+fastapi = "==0.115.11"
 uvicorn = {extras = ["standard"], version = "==0.34.0"}
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.6"}

View file

@@ -16,7 +16,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, core_dynamic_crons
 from routers import core, core_dynamic
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

 loglevel = config("LOGLEVEL", default=logging.WARNING)
 print(f">Loglevel set to: {loglevel}")
@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)
-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/pa")
+app.include_router(product_analytics.app, prefix="/pa")
+app.include_router(product_analytics.app_apikey, prefix="/pa")
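[Editor's note] For context, FastAPI's include_router prefix prepends a path segment to every route in the router, so a handler registered as '/{projectId}/events/search' is served at '/pa/{projectId}/events/search'. A minimal, self-contained illustration (route and names are hypothetical):

    from fastapi import APIRouter, FastAPI

    router = APIRouter()

    @router.get("/{project_id}/events/names")
    def event_names(project_id: int):
        return {"data": []}

    app = FastAPI()
    # The route is now served at /pa/{project_id}/events/names
    app.include_router(router, prefix="/pa")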

View file

@@ -1,14 +0,0 @@
-from chalicelib.utils.ch_client import ClickHouseClient
-
-
-def search_events(project_id: int, data: dict):
-    with ClickHouseClient() as ch_client:
-        r = ch_client.format(
-            """SELECT *
-               FROM taha.events
-               WHERE project_id=%(project_id)s
-               ORDER BY created_at;""",
-            params={"project_id": project_id})
-        x = ch_client.execute(r)
-    return x

View file

@@ -0,0 +1,28 @@
+from chalicelib.utils import helper
+from chalicelib.utils.ch_client import ClickHouseClient
+
+
+def get_events(project_id: int):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT event_name, display_name
+               FROM product_analytics.all_events
+               WHERE project_id=%(project_id)s
+               ORDER BY display_name;""",
+            parameters={"project_id": project_id})
+        x = ch_client.execute(r)
+    return helper.list_to_camel_case(x)
+
+
+def search_events(project_id: int, data: dict):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT *
+               FROM product_analytics.events
+               WHERE project_id=%(project_id)s
+               ORDER BY created_at;""",
+            parameters={"project_id": project_id})
+        x = ch_client.execute(r)
+    return helper.list_to_camel_case(x)

View file

@@ -0,0 +1,19 @@
+from chalicelib.utils import helper
+from chalicelib.utils.ch_client import ClickHouseClient
+
+
+def get_properties(project_id: int, event_name):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT all_properties.property_name,
+                      all_properties.display_name
+               FROM product_analytics.event_properties
+                    INNER JOIN product_analytics.all_properties USING (property_name)
+               WHERE event_properties.project_id=%(project_id)s
+                 AND all_properties.project_id=%(project_id)s
+                 AND event_properties.event_name=%(event_name)s
+               ORDER BY created_at;""",
+            parameters={"project_id": project_id,"event_name": event_name})
+        properties = ch_client.execute(r)
+    return helper.list_to_camel_case(properties)

View file

@@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
 from . import sessions_pg
 from . import sessions_pg as sessions_legacy
 from . import sessions_ch
+from . import sessions_search_pg
+from . import sessions_search_pg as sessions_search_legacy
-if config("EXP_METRICS", cast=bool, default=False):
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+    logger.info(">>> Using experimental sessions search")
+    from . import sessions_ch as sessions
+    from . import sessions_search_ch as sessions_search
+else:
+    from . import sessions_pg as sessions
+    from . import sessions_search_pg as sessions_search
+
+# if config("EXP_METRICS", cast=bool, default=False):
+#     from . import sessions_ch as sessions
+# else:
+#     from . import sessions_pg as sessions
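[Editor's note] The aliasing above keeps call sites backend-agnostic: modules import sessions_search once and the EXP_SESSIONS_SEARCH flag decides whether that name resolves to the ClickHouse or the PostgreSQL implementation. The same pattern in a tiny runnable sketch (the two classes stand in for the real modules):

    import os

    class _SearchCH:      # stand-in for sessions_search_ch
        backend = "clickhouse"

    class _SearchPG:      # stand-in for sessions_search_pg
        backend = "postgresql"

    sessions_search = _SearchCH() if os.environ.get("EXP_SESSIONS_SEARCH") == "true" else _SearchPG()
    # Callers use sessions_search.<...>() without ever branching on the flag.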

View file

@@ -671,24 +671,36 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 events_conditions.append({"type": event_where[-1]})
                 if not is_any:
                     if schemas.ClickEventExtraOperator.has_value(event.operator):
-                        event_where.append(json_condition(
-                            "main",
-                            "$properties",
-                            "selector", op, event.value, e_k)
+                        # event_where.append(json_condition(
+                        #     "main",
+                        #     "$properties",
+                        #     "selector", op, event.value, e_k)
+                        # )
+                        event_where.append(
+                            sh.multi_conditions(f"main.`$properties`.selector {op} %({e_k})s",
+                                                event.value, value_key=e_k)
                         )
                         events_conditions[-1]["condition"] = event_where[-1]
                     else:
                         if is_not:
-                            event_where.append(json_condition(
-                                "sub", "$properties", _column, op, event.value, e_k
-                            ))
+                            # event_where.append(json_condition(
+                            #     "sub", "$properties", _column, op, event.value, e_k
+                            # ))
+                            event_where.append(
+                                sh.multi_conditions(f"sub.`$properties`.{_column} {op} %({e_k})s",
+                                                    event.value, value_key=e_k)
+                            )
                             events_conditions_not.append(
                                 {
                                     "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
                             events_conditions_not[-1]["condition"] = event_where[-1]
                         else:
+                            # event_where.append(
+                            #     json_condition("main", "$properties", _column, op, event.value, e_k)
+                            # )
                             event_where.append(
-                                json_condition("main", "$properties", _column, op, event.value, e_k)
+                                sh.multi_conditions(f"main.`$properties`.{_column} {op} %({e_k})s",
+                                                    event.value, value_key=e_k)
                             )
                             events_conditions[-1]["condition"] = event_where[-1]
                 else:
else:
@@ -870,12 +882,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 events_conditions[-1]["condition"] = []
                 if not is_any and event.value not in [None, "*", ""]:
                     event_where.append(
-                        sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
-                                            event.value, value_key=e_k))
+                        sh.multi_conditions(
+                            f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
+                            event.value, value_key=e_k))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     events_extra_join += f" AND {event_where[-1]}"
                 if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
-                    event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
+                    event_where.append(
+                        sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
+                                            value_key=s_k))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     events_extra_join += f" AND {event_where[-1]}"
@@ -1193,6 +1208,28 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
             else:
                 continue
+            if event.properties is not None and len(event.properties.filters) > 0:
+                event_fiters = []
+                for l, property in enumerate(event.properties.filters):
+                    a_k = f"{e_k}_att_{l}"
+                    full_args = {**full_args,
+                                 **sh.multi_values(property.value, value_key=a_k)}
+                    op = sh.get_sql_operator(property.operator)
+                    condition = f"main.properties.{property.name} {op} %({a_k})s"
+                    if property.is_predefined:
+                        condition = f"main.{property.name} {op} %({a_k})s"
+                    event_where.append(
+                        sh.multi_conditions(condition, property.value, value_key=a_k)
+                    )
+                    event_fiters.append(event_where[-1])
+                if len(event_fiters) > 0:
+                    events_conditions[-1]["condition"] += " AND ("
+                    for l, e_f in enumerate(event_fiters):
+                        if l > 0:
+                            events_conditions[-1]["condition"] += event.properties.operators[l - 1] + e_f
+                        else:
+                            events_conditions[-1]["condition"] += e_f
+                    events_conditions[-1]["condition"] += ")"
             if event_index == 0 or or_events:
                 event_where += ss_constraints
             if is_not:

View file

@@ -1,6 +1,5 @@
 import ast
 import logging
-from typing import List, Union

 import schemas
 from chalicelib.core import events, metadata, projects

View file

@@ -141,7 +141,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                ) AS users_sessions;""",
                               full_args)
     elif ids_only:
-        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
+        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
                                           {query_part}
                                           ORDER BY s.session_id desc
                                           LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",
@@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                           ORDER BY sort_key {data.order}
                                           LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                               parameters=full_args)
-    logging.debug("--------------------")
-    logging.debug(main_query)
-    logging.debug("--------------------")
     try:
+        logging.debug("--------------------")
         sessions_list = cur.execute(main_query)
+        logging.debug("--------------------")
     except Exception as err:
         logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
         logging.warning(main_query)

View file

@@ -122,7 +122,10 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
         sort = 'session_id'
         if data.sort is not None and data.sort != "session_id":
             # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
-            sort = helper.key_to_snake_case(data.sort)
+            if data.sort == 'datetime':
+                sort = 'start_ts'
+            else:
+                sort = helper.key_to_snake_case(data.sort)

         meta_keys = metadata.get(project_id=project.project_id)
         main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
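[Editor's note] The special case deserves a note: the API's "datetime" sort field has no column of the same name, so it maps to start_ts before the generic camelCase-to-snake_case fallback. A standalone sketch of that normalization (re-implemented here for illustration; the real helper lives in chalicelib.utils.helper):

    import re

    def key_to_snake_case(key: str) -> str:
        return re.sub(r"(?<!^)(?=[A-Z])", "_", key).lower()

    def sort_column(sort):
        if sort is None or sort == "session_id":
            return "session_id"
        if sort == "datetime":
            return "start_ts"  # column name differs from the API field
        return key_to_snake_case(sort)

    assert sort_column("datetime") == "start_ts"
    assert sort_column("eventsCount") == "events_count"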

View file

@@ -11,9 +11,3 @@ if smtp.has_smtp():
     logger.info("valid SMTP configuration found")
 else:
     logger.info("no SMTP configuration found or SMTP validation failed")
-
-if config("EXP_CH_DRIVER", cast=bool, default=True):
-    logging.info(">>> Using new CH driver")
-    from . import ch_client_exp as ch_client
-else:
-    from . import ch_client

View file

@@ -1,73 +1,185 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty

import clickhouse_driver
import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config

logger = logging.getLogger(__name__)

_CH_CONFIG = {"host": config("ch_host"),
              "user": config("ch_user", default="default"),
              "password": config("ch_password", default=""),
              "port": config("ch_port_http", cast=int),
              "client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)

settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

if config('ch_receive_timeout', cast=int, default=-1) > 0:
    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
    extra_args["compression"] = "lz4"


def transform_result(self, original_function):
    @wraps(original_function)
    def wrapper(*args, **kwargs):
        if kwargs.get("parameters"):
            if config("LOCAL_DEV", cast=bool, default=False):
                logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
            else:
                logger.debug(
                    str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
        elif len(args) > 0:
            if config("LOCAL_DEV", cast=bool, default=False):
                logger.debug(args[0])
            else:
                logger.debug(str.encode(args[0]))
        result = original_function(*args, **kwargs)
        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
            column_names = result.column_names
            result = result.result_rows
            result = [dict(zip(column_names, row)) for row in result]
        return result

    return wrapper


class ClickHouseConnectionPool:
    def __init__(self, min_size, max_size):
        self.min_size = min_size
        self.max_size = max_size
        self.pool = Queue()
        self.lock = threading.Lock()
        self.total_connections = 0
        # Initialize the pool with min_size connections
        for _ in range(self.min_size):
            client = clickhouse_connect.get_client(**CH_CONFIG,
                                                   database=config("ch_database", default="default"),
                                                   settings=settings,
                                                   **extra_args)
            self.pool.put(client)
            self.total_connections += 1

    def get_connection(self):
        try:
            # Try to get a connection without blocking
            client = self.pool.get_nowait()
            return client
        except Empty:
            with self.lock:
                if self.total_connections < self.max_size:
                    client = clickhouse_connect.get_client(**CH_CONFIG,
                                                           database=config("ch_database", default="default"),
                                                           settings=settings,
                                                           **extra_args)
                    self.total_connections += 1
                    return client
            # If max_size reached, wait until a connection is available
            client = self.pool.get()
            return client

    def release_connection(self, client):
        self.pool.put(client)

    def close_all(self):
        with self.lock:
            while not self.pool.empty():
                client = self.pool.get()
                client.close()
            self.total_connections = 0


CH_pool: ClickHouseConnectionPool = None
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0


def make_pool():
    if not config('CH_POOL', cast=bool, default=True):
        return
    global CH_pool
    global RETRY
    if CH_pool is not None:
        try:
            CH_pool.close_all()
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
    try:
        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                           max_size=config("CH_MAXCONN", cast=int, default=8))
        if CH_pool is not None:
            logger.info("Connection pool created successfully for CH")
    except ConnectionError as error:
        logger.error("Error while connecting to CH", exc_info=error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


class ClickHouseClient:
    __client = None

    def __init__(self, database=None):
        extra_args = {}
        if config("CH_COMPRESSION", cast=bool, default=True):
            extra_args["compression"] = "lz4"
        self.__client = clickhouse_driver.Client(host=config("ch_host"),
                                                 database=database if database else config("ch_database",
                                                                                           default="default"),
                                                 user=config("ch_user", default="default"),
                                                 password=config("ch_password", default=""),
                                                 port=config("ch_port", cast=int),
                                                 settings=settings,
                                                 **extra_args) \
            if self.__client is None else self.__client
        if self.__client is None:
            if database is not None or not config('CH_POOL', cast=bool, default=True):
                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
                                                              database=database if database else config("ch_database",
                                                                                                        default="default"),
                                                              settings=settings,
                                                              **extra_args)
            else:
                self.__client = CH_pool.get_connection()
        self.__client.execute = transform_result(self, self.__client.query)
        self.__client.format = self.format

    def __enter__(self):
        return self

    def execute(self, query, parameters=None, **args):
        try:
            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
            keys = tuple(x for x, y in results[1])
            return [dict(zip(keys, i)) for i in results[0]]
        except Exception as err:
            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
            logger.error("--------- CH QUERY EXCEPTION -----------")
            logger.error(self.format(query=query, parameters=parameters)
                         .replace('\n', '\\n')
                         .replace(' ', ' ')
                         .replace(' ', ' '))
            logger.error("--------------------")
            raise err

    def insert(self, query, params=None, **args):
        return self.__client.execute(query=query, params=params, **args)

    def client(self):
        return self.__client

    def format(self, query, parameters):
        if parameters is None:
            return query
        return self.__client.substitute_params(query, parameters, self.__client.connection.context)

    def format(self, query, parameters=None):
        if parameters:
            ctx = QueryContext(query=query, parameters=parameters)
            return ctx.final_query
        return query

    def __exit__(self, *args):
        pass
        if config('CH_POOL', cast=bool, default=True):
            CH_pool.release_connection(self.__client)
        else:
            self.__client.close()


async def init():
    logger.info(f">CH_POOL:not defined")
    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
    if config('CH_POOL', cast=bool, default=True):
        make_pool()


async def terminate():
    pass
    global CH_pool
    if CH_pool is not None:
        try:
            CH_pool.close_all()
            logger.info("Closed all connexions to CH")
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)

View file

@@ -1,177 +0,0 @@
-import logging
-import threading
-import time
-from functools import wraps
-from queue import Queue, Empty
-
-import clickhouse_connect
-from clickhouse_connect.driver.query import QueryContext
-from decouple import config
-
-logger = logging.getLogger(__name__)
-
-_CH_CONFIG = {"host": config("ch_host"),
-              "user": config("ch_user", default="default"),
-              "password": config("ch_password", default=""),
-              "port": config("ch_port_http", cast=int),
-              "client_name": config("APP_NAME", default="PY")}
-CH_CONFIG = dict(_CH_CONFIG)
-
-settings = {}
-if config('ch_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
-    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
-
-if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
-    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
-
-extra_args = {}
-if config("CH_COMPRESSION", cast=bool, default=True):
-    extra_args["compression"] = "lz4"
-
-
-def transform_result(self, original_function):
-    @wraps(original_function)
-    def wrapper(*args, **kwargs):
-        logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
-        result = original_function(*args, **kwargs)
-        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
-            column_names = result.column_names
-            result = result.result_rows
-            result = [dict(zip(column_names, row)) for row in result]
-        return result
-
-    return wrapper
-
-
-class ClickHouseConnectionPool:
-    def __init__(self, min_size, max_size):
-        self.min_size = min_size
-        self.max_size = max_size
-        self.pool = Queue()
-        self.lock = threading.Lock()
-        self.total_connections = 0
-        # Initialize the pool with min_size connections
-        for _ in range(self.min_size):
-            client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                   database=config("ch_database", default="default"),
-                                                   settings=settings,
-                                                   **extra_args)
-            self.pool.put(client)
-            self.total_connections += 1
-
-    def get_connection(self):
-        try:
-            # Try to get a connection without blocking
-            client = self.pool.get_nowait()
-            return client
-        except Empty:
-            with self.lock:
-                if self.total_connections < self.max_size:
-                    client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                           database=config("ch_database", default="default"),
-                                                           settings=settings,
-                                                           **extra_args)
-                    self.total_connections += 1
-                    return client
-            # If max_size reached, wait until a connection is available
-            client = self.pool.get()
-            return client
-
-    def release_connection(self, client):
-        self.pool.put(client)
-
-    def close_all(self):
-        with self.lock:
-            while not self.pool.empty():
-                client = self.pool.get()
-                client.close()
-            self.total_connections = 0
-
-
-CH_pool: ClickHouseConnectionPool = None
-RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
-RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
-RETRY = 0
-
-
-def make_pool():
-    if not config('CH_POOL', cast=bool, default=True):
-        return
-    global CH_pool
-    global RETRY
-    if CH_pool is not None:
-        try:
-            CH_pool.close_all()
-        except Exception as error:
-            logger.error("Error while closing all connexions to CH", exc_info=error)
-    try:
-        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
-                                           max_size=config("CH_MAXCONN", cast=int, default=8))
-        if CH_pool is not None:
-            logger.info("Connection pool created successfully for CH")
-    except ConnectionError as error:
-        logger.error("Error while connecting to CH", exc_info=error)
-        if RETRY < RETRY_MAX:
-            RETRY += 1
-            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
-            time.sleep(RETRY_INTERVAL)
-            make_pool()
-        else:
-            raise error
-
-
-class ClickHouseClient:
-    __client = None
-
-    def __init__(self, database=None):
-        if self.__client is None:
-            if database is not None or not config('CH_POOL', cast=bool, default=True):
-                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                              database=database if database else config("ch_database",
-                                                                                                        default="default"),
-                                                              settings=settings,
-                                                              **extra_args)
-            else:
-                self.__client = CH_pool.get_connection()
-        self.__client.execute = transform_result(self, self.__client.query)
-        self.__client.format = self.format
-
-    def __enter__(self):
-        return self.__client
-
-    def format(self, query, *, parameters=None):
-        if parameters is None:
-            return query
-        return query % {
-            key: f"'{value}'" if isinstance(value, str) else value
-            for key, value in parameters.items()
-        }
-
-    def __exit__(self, *args):
-        if config('CH_POOL', cast=bool, default=True):
-            CH_pool.release_connection(self.__client)
-        else:
-            self.__client.close()
-
-
-async def init():
-    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
-    if config('CH_POOL', cast=bool, default=True):
-        make_pool()
-
-
-async def terminate():
-    global CH_pool
-    if CH_pool is not None:
-        try:
-            CH_pool.close_all()
-            logger.info("Closed all connexions to CH")
-        except Exception as error:
-            logger.error("Error while closing all connexions to CH", exc_info=error)

View file

@@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
 EXP_ALERTS=true
 EXP_ERRORS_SEARCH=true
-EXP_METRICS=true
+EXP_METRICS=true
+EXP_SESSIONS_SEARCH=true

View file

@ -1,591 +0,0 @@
-- -- Original Q3
-- WITH ranked_events AS (SELECT *
-- FROM ranked_events_1736344377403),
-- n1 AS (SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- COUNT(1) AS sessions_count
-- FROM ranked_events
-- WHERE event_number_in_session = 1
-- AND isNotNull(next_value)
-- GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n2 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n1
-- INNER JOIN ranked_events AS re
-- ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
-- WHERE re.event_number_in_session = 2
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n3 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n2
-- INNER JOIN ranked_events AS re
-- ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
-- WHERE re.event_number_in_session = 3
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n4 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n3
-- INNER JOIN ranked_events AS re
-- ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
-- WHERE re.event_number_in_session = 4
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n5 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n4
-- INNER JOIN ranked_events AS re
-- ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
-- WHERE re.event_number_in_session = 5
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8)
-- SELECT *
-- FROM (SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n1
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n2
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n3
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n4
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n5) AS chart_steps
-- ORDER BY event_number_in_session;
-- Q1
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
FROM experimental.events AS events
WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
AND events.project_id = toUInt16(65)
AND events.datetime >= toDateTime(1735599600000 / 1000)
AND events.datetime < toDateTime(1736290799999 / 1000)
GROUP BY 1),
pre_ranked_events AS (SELECT *
FROM (SELECT session_id,
event_type,
datetime,
url_path AS e_value,
row_number() OVER (PARTITION BY session_id
ORDER BY datetime ,
message_id ) AS event_number_in_session
FROM experimental.events AS events
INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
WHERE events.project_id = toUInt16(65)
AND events.datetime >= toDateTime(1735599600000 / 1000)
AND events.datetime < toDateTime(1736290799999 / 1000)
AND (events.event_type = 'LOCATION')
AND events.datetime >= initial_event.start_event_timestamp
) AS full_ranked_events
WHERE event_number_in_session <= 5)
SELECT *
FROM pre_ranked_events);
;
SELECT *
FROM pre_ranked_events_1736344377403
WHERE event_number_in_session < 3;
-- ---------Q2-----------
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
DROP TABLE ranked_events_1736344377403;
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
(WITH pre_ranked_events AS (SELECT *
FROM pre_ranked_events_1736344377403),
start_points AS (SELECT DISTINCT session_id
FROM pre_ranked_events
WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
AND pre_ranked_events.event_number_in_session = 1),
ranked_events AS (SELECT pre_ranked_events.*,
leadInFrame(e_value)
OVER (PARTITION BY session_id ORDER BY datetime
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
leadInFrame(toNullable(event_type))
OVER (PARTITION BY session_id ORDER BY datetime
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
FROM start_points
INNER JOIN pre_ranked_events USING (session_id))
SELECT *
FROM ranked_events);
-- ranked events
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
-- AND e_value='/en/deployment/deploy-docker/'
-- AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
-- AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
AND isNotNull(next_value)
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY event_number_in_session, sessions_count DESC;
SELECT event_number_in_session,
event_type,
e_value,
COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value
ORDER BY event_number_in_session, sessions_count DESC;
SELECT COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
AND isNull(next_value)
;
-- ---------Q3 MORE -----------
WITH ranked_events AS (SELECT *
FROM ranked_events_1736344377403),
n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n2 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 2
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n3 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 3
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
drop_n AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n1
WHERE isNull(n1.next_type)
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n2
WHERE isNull(n2.next_type)),
-- TODO: make this as top_steps, where every step will go to next as top/others
top_n1 AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n1
WHERE isNotNull(next_type)
ORDER BY sessions_count DESC
LIMIT 3),
top_n2 AS (-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n2
WHERE (event_type, e_value) IN (SELECT event_type,
e_value
FROM n2
WHERE isNotNull(next_type)
GROUP BY event_type, e_value
ORDER BY SUM(sessions_count) DESC
LIMIT 3)
ORDER BY sessions_count DESC),
top_n AS (SELECT *
FROM top_n1
UNION ALL
SELECT *
FROM top_n2),
u_top_n AS (SELECT DISTINCT event_number_in_session,
event_type,
e_value
FROM top_n),
others_n AS (
-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n1
WHERE isNotNull(next_type)
ORDER BY sessions_count DESC
LIMIT 1000000 OFFSET 3
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n2
WHERE isNotNull(next_type)
-- GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 1000000 OFFSET 3)
SELECT *
FROM (
-- Top
SELECT *
FROM top_n
-- UNION ALL
-- -- Others
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- 'OTHER' AS next_type,
-- NULL AS next_value,
-- SUM(sessions_count)
-- FROM others_n
-- GROUP BY event_number_in_session, event_type, e_value
-- UNION ALL
-- -- Top go to Drop
-- SELECT drop_n.event_number_in_session,
-- drop_n.event_type,
-- drop_n.e_value,
-- drop_n.next_type,
-- drop_n.next_value,
-- drop_n.sessions_count
-- FROM drop_n
-- INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
-- AND drop_n.event_type = u_top_n.event_type
-- AND drop_n.e_value = u_top_n.e_value)
-- ORDER BY drop_n.event_number_in_session
-- -- -- UNION ALL
-- -- -- Top go to Others
-- SELECT top_n.event_number_in_session,
-- top_n.event_type,
-- top_n.e_value,
-- 'OTHER' AS next_type,
-- NULL AS next_value,
-- SUM(top_n.sessions_count) AS sessions_count
-- FROM top_n
-- LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
-- AND top_n.next_type = others_n.event_type
-- AND top_n.next_value = others_n.e_value)
-- WHERE others_n.event_number_in_session IS NULL
-- AND top_n.next_type IS NOT NULL
-- GROUP BY event_number_in_session, event_type, e_value
-- UNION ALL
-- -- Others got to Top
-- SELECT others_n.event_number_in_session,
-- 'OTHER' AS event_type,
-- NULL AS e_value,
-- others_n.s_next_type AS next_type,
-- others_n.s_next_value AS next_value,
-- SUM(sessions_count) AS sessions_count
-- FROM others_n
-- INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
-- others_n.s_next_type = top_n.event_type AND
-- others_n.s_next_value = top_n.event_type)
-- GROUP BY others_n.event_number_in_session, next_type, next_value
-- UNION ALL
-- -- TODO: find if this works or not
-- -- Others got to Others
-- SELECT others_n.event_number_in_session,
-- 'OTHER' AS event_type,
-- NULL AS e_value,
-- 'OTHERS' AS next_type,
-- NULL AS next_value,
-- SUM(sessions_count) AS sessions_count
-- FROM others_n
-- LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
-- AND others_n.s_next_type = u_top_n.event_type
-- AND others_n.s_next_value = u_top_n.e_value)
-- WHERE u_top_n.event_number_in_session IS NULL
-- GROUP BY others_n.event_number_in_session
)
ORDER BY event_number_in_session;
-- ---------Q3 TOP ON VALUE ONLY -----------
WITH ranked_events AS (SELECT *
FROM ranked_events_1736344377403),
n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n2 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 2
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n3 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 3
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
drop_n AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n1
WHERE isNull(n1.next_type)
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n2
WHERE isNull(n2.next_type)),
top_n AS (SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n1
GROUP BY event_number_in_session, event_type, e_value
LIMIT 1
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n2
GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 3
UNION ALL
-- STEP 3
SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n3
GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 3),
top_n_with_next AS (SELECT n1.*
FROM n1
UNION ALL
SELECT n2.*
FROM n2
INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
AND n2.event_type = top_n.event_type
AND n2.e_value = top_n.e_value)),
others_n AS (
-- STEP 2
SELECT n2.*
FROM n2
WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
(SELECT event_number_in_session, event_type, e_value
FROM top_n
WHERE top_n.event_number_in_session = 2)
UNION ALL
-- STEP 3
SELECT n3.*
FROM n3
WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
(SELECT event_number_in_session, event_type, e_value
FROM top_n
WHERE top_n.event_number_in_session = 3))
SELECT *
FROM (
-- SELECT sum(top_n_with_next.sessions_count)
-- FROM top_n_with_next
-- WHERE event_number_in_session = 1
-- -- AND isNotNull(next_value)
-- AND (next_type, next_value) IN
-- (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
-- -- SELECT * FROM others_n
-- -- SELECT * FROM n2
-- SELECT *
-- FROM top_n
-- );
-- Top to Top: valid
SELECT top_n_with_next.*
FROM top_n_with_next
INNER JOIN top_n
ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
AND top_n_with_next.next_type = top_n.event_type
AND top_n_with_next.next_value = top_n.e_value)
UNION ALL
-- Top to Others: valid
SELECT top_n_with_next.event_number_in_session,
top_n_with_next.event_type,
top_n_with_next.e_value,
'OTHER' AS next_type,
NULL AS next_value,
SUM(top_n_with_next.sessions_count) AS sessions_count
FROM top_n_with_next
WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
(SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
UNION ALL
-- Top go to Drop: valid
SELECT drop_n.event_number_in_session,
drop_n.event_type,
drop_n.e_value,
drop_n.next_type,
drop_n.next_value,
drop_n.sessions_count
FROM drop_n
INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
AND drop_n.event_type = top_n.event_type
AND drop_n.e_value = top_n.e_value)
ORDER BY drop_n.event_number_in_session
UNION ALL
-- Others got to Drop: valid
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
'DROP' AS next_type,
NULL AS next_value,
SUM(others_n.sessions_count) AS sessions_count
FROM others_n
WHERE isNull(others_n.next_type)
AND others_n.event_number_in_session < 3
GROUP BY others_n.event_number_in_session, next_type, next_value
UNION ALL
-- Others got to Top:valid
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
others_n.next_type,
others_n.next_value,
SUM(others_n.sessions_count) AS sessions_count
FROM others_n
WHERE isNotNull(others_n.next_type)
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
(SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
UNION ALL
-- Others got to Others
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
'OTHERS' AS next_type,
NULL AS next_value,
SUM(sessions_count) AS sessions_count
FROM others_n
WHERE isNotNull(others_n.next_type)
AND others_n.event_number_in_session < 3
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
(SELECT event_number_in_session, event_type, e_value FROM top_n)
GROUP BY others_n.event_number_in_session)
ORDER BY event_number_in_session, sessions_count
DESC;

View file

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6

View file

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6

View file

@@ -0,0 +1,28 @@
+import schemas
+from chalicelib.core.product_analytics import events, properties
+from fastapi import Depends
+from or_dependencies import OR_context
+from routers.base import get_routers
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.get('/{projectId}/properties/search', tags=["product_analytics"])
+def get_event_properties(projectId: int, event_name: str = None,
+                         context: schemas.CurrentContext = Depends(OR_context)):
+    if not event_name or len(event_name) == 0:
+        return {"data": []}
+    return {"data": properties.get_properties(project_id=projectId, event_name=event_name)}
+
+
+@app.get('/{projectId}/events/names', tags=["dashboard"])
+def get_all_events(projectId: int,
+                   context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.get_events(project_id=projectId)}
+
+
+@app.post('/{projectId}/events/search', tags=["dashboard"])
+def search_events(projectId: int,
+                  # data: schemas.CreateDashboardSchema = Body(...),
+                  context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.search_events(project_id=projectId, data={})}

View file

@@ -1,15 +0,0 @@
-import schemas
-from chalicelib.core.metrics import product_anaytics2
-from fastapi import Depends
-from or_dependencies import OR_context
-from routers.base import get_routers
-
-public_app, app, app_apikey = get_routers()
-
-
-@app.post('/{projectId}/events/search', tags=["dashboard"])
-def search_events(projectId: int,
-                  # data: schemas.CreateDashboardSchema = Body(...),
-                  context: schemas.CurrentContext = Depends(OR_context)):
-    return product_anaytics2.search_events(project_id=projectId, data={})

View file

@@ -545,6 +545,70 @@ class RequestGraphqlFilterSchema(BaseModel):
         return values


+class EventPredefinedPropertyType(str, Enum):
+    TIME = "$time"
+    SOURCE = "$source"
+    DURATION_S = "$duration_s"
+    DESCRIPTION = "description"
+    AUTO_CAPTURED = "$auto_captured"
+    SDK_EDITION = "$sdk_edition"
+    SDK_VERSION = "$sdk_version"
+    DEVICE_ID = "$device_id"
+    OS = "$os"
+    OS_VERSION = "$os_version"
+    BROWSER = "$browser"
+    BROWSER_VERSION = "$browser_version"
+    DEVICE = "$device"
+    SCREEN_HEIGHT = "$screen_height"
+    SCREEN_WIDTH = "$screen_width"
+    CURRENT_URL = "$current_url"
+    INITIAL_REFERRER = "$initial_referrer"
+    REFERRING_DOMAIN = "$referring_domain"
+    REFERRER = "$referrer"
+    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
+    SEARCH_ENGINE = "$search_engine"
+    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
+    UTM_SOURCE = "utm_source"
+    UTM_MEDIUM = "utm_medium"
+    UTM_CAMPAIGN = "utm_campaign"
+    COUNTRY = "$country"
+    STATE = "$state"
+    CITY = "$city"
+    ISSUE_TYPE = "issue_type"
+    TAGS = "$tags"
+    IMPORT = "$import"
+
+
+class PropertyFilterSchema(BaseModel):
+    name: Union[EventPredefinedPropertyType, str] = Field(...)
+    operator: Union[SearchEventOperator, MathOperator] = Field(...)
+    value: List[Union[int, str]] = Field(...)
+    property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
+
+    @computed_field
+    @property
+    def is_predefined(self) -> bool:
+        return EventPredefinedPropertyType.has_value(self.name)
+
+    @model_validator(mode="after")
+    def transform_name(self):
+        if isinstance(self.name, Enum):
+            self.name = self.name.value
+        return self
+
+
+class EventPropertiesSchema(BaseModel):
+    operators: List[Literal["and", "or"]] = Field(...)
+    filters: List[PropertyFilterSchema] = Field(...)
+
+    @model_validator(mode="after")
+    def event_filter_validator(self):
+        assert len(self.filters) == 0 \
+               or len(self.operators) == len(self.filters) - 1, \
+            "Number of operators must match the number of filter-1"
+        return self
+
+
 class SessionSearchEventSchema2(BaseModel):
     is_event: Literal[True] = True
     value: List[Union[str, int]] = Field(...)
@@ -553,6 +617,7 @@ class SessionSearchEventSchema2(BaseModel):
     source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
     sourceOperator: Optional[MathOperator] = Field(default=None)
     filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
+    properties: Optional[EventPropertiesSchema] = Field(default=None)

     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
     _single_to_list_values = field_validator('value', mode='before')(single_to_list)
@@ -1529,3 +1594,30 @@ class TagCreate(TagUpdate):

 class ScopeSchema(BaseModel):
     scope: int = Field(default=1, ge=1, le=2)
+
+
+class SessionModel(BaseModel):
+    duration: int
+    errorsCount: int
+    eventsCount: int
+    favorite: bool = Field(default=False)
+    issueScore: int
+    issueTypes: List[IssueType] = Field(default=[])
+    metadata: dict = Field(default={})
+    pagesCount: int
+    platform: str
+    projectId: int
+    sessionId: str
+    startTs: int
+    timezone: Optional[str]
+    userAnonymousId: Optional[str]
+    userBrowser: str
+    userCity: str
+    userCountry: str
+    userDevice: Optional[str]
+    userDeviceType: str
+    userId: Optional[str]
+    userOs: str
+    userState: str
+    userUuid: str
+    viewed: bool = Field(default=False)
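[Editor's note] The event_filter_validator above encodes the invariant that n filters need exactly n-1 joining operators. A quick illustration of a payload that satisfies it (field values are made up):

    properties = {
        "operators": ["and"],
        "filters": [
            {"name": "$browser", "operator": "is", "value": ["Chrome"]},
            {"name": "utm_source", "operator": "is", "value": ["newsletter"]},
        ],
    }
    # EventPropertiesSchema(**properties) passes: 2 filters, 1 operator.
    # Adding a second operator (or removing one filter) would trip the assert.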

View file

@@ -27,9 +27,14 @@ const respond = function (req, res, data) {
         res.setHeader('Content-Type', 'application/json');
         res.end(JSON.stringify(result));
     } else {
-        res.cork(() => {
-            res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
-        });
+        if (!res.aborted) {
+            res.cork(() => {
+                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
+            });
+        } else {
+            logger.debug("response aborted");
+            return;
+        }
     }
     const duration = performance.now() - req.startTs;
     IncreaseTotalRequests();

View file

@@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
 	// Add tracker version to context
 	r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))

-	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
-		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
-		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
-		return
-	}

 	// Handler's logic
 	if req.ProjectKey == nil {
@@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
 	// Add projectID to context
 	r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))

+	// Validate tracker version
+	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
+		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
+		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
+		return
+	}
+
 	// Check if the project supports mobile sessions
 	if !p.IsWeb() {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)

ee/api/.gitignore
View file

@ -223,11 +223,14 @@ Pipfile.lock
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
/chalicelib/core/sourcemaps
/chalicelib/core/tags.py
/chalicelib/core/product_analytics
/chalicelib/saml
/chalicelib/utils/__init__.py
/chalicelib/utils/args_transformer.py
@ -289,4 +292,4 @@ Pipfile.lock
/chalicelib/core/errors/errors_pg.py
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_details.py
/chalicelib/utils/contextual_validators.py
/chalicelib/utils/contextual_validators.py

View file

@ -6,23 +6,20 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.16"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.11"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20"
redis = "==5.2.1"
azure-storage-blob = "==12.24.1"

View file

@@ -150,9 +150,9 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)
-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_anaytics.public_app, prefix="/ap")
+app.include_router(product_anaytics.app, prefix="/ap")
+app.include_router(product_anaytics.app_apikey, prefix="/ap")

 if config("ENABLE_SSO", cast=bool, default=True):
     app.include_router(saml.public_app)

View file

@@ -1,17 +0,0 @@
-import logging
-
-from decouple import config
-
-logger = logging.getLogger(__name__)
-
-from . import sessions_pg
-from . import sessions_pg as sessions_legacy
-from . import sessions_ch
-from . import sessions_search as sessions_search_legacy
-if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
-    logger.info(">>> Using experimental sessions search")
-    from . import sessions_ch as sessions
-    from . import sessions_search_exp as sessions_search
-else:
-    from . import sessions_pg as sessions
-    from . import sessions_search as sessions_search

View file

@ -44,12 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/core/product_analytics
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py

View file

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6

View file

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
-fastapi==0.115.8
+fastapi==0.115.11
 python-decouple==3.8
 pydantic[email]==2.10.6
 apscheduler==3.11.0

View file

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 gunicorn==23.0.0
 python-decouple==3.8

View file

@@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator
 from chalicelib.utils.TimeUTC import TimeUTC

 from . import schemas
-from .overrides import BaseModel, Enum, ORUnion
+from .overrides import BaseModel, Enum
 from .transformers_validators import remove_whitespace
@@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
         return values


-class SessionModel(BaseModel):
-    duration: int
-    errorsCount: int
-    eventsCount: int
-    favorite: bool = Field(default=False)
-    issueScore: int
-    issueTypes: List[schemas.IssueType] = Field(default=[])
-    metadata: dict = Field(default={})
-    pagesCount: int
-    platform: str
-    projectId: int
-    sessionId: str
-    startTs: int
-    timezone: Optional[str]
-    userAnonymousId: Optional[str]
-    userBrowser: str
-    userCity: str
-    userCountry: str
-    userDevice: Optional[str]
-    userDeviceType: str
-    userId: Optional[str]
-    userOs: str
-    userState: str
-    userUuid: str
-    viewed: bool = Field(default=False)
-
-
 class AssistRecordUpdatePayloadSchema(BaseModel):
     name: str = Field(..., min_length=1)

     _transform_name = field_validator('name', mode="before")(remove_whitespace)


@ -83,9 +83,11 @@ if (process.env.uws !== "true") {
const uWrapper = function (fn) {
return (res, req) => {
res.id = 1;
res.aborted = false;
req.startTs = performance.now(); // track request's start timestamp
req.method = req.getMethod();
res.onAborted(() => {
res.aborted = true;
onAbortedOrFinishedResponse(res);
});
return fn(req, res);


@ -3,20 +3,50 @@ const {getCompressionConfig} = require("./helper");
const {logger} = require('./logger');
let io;
const getServer = function () {return io;}
const getServer = function () {
return io;
}
const useRedis = process.env.redis === "true";
let inMemorySocketsCache = [];
let lastCacheUpdateTime = 0;
const CACHE_REFRESH_INTERVAL = parseInt(process.env.cacheRefreshInterval) || 5000;
const doFetchAllSockets = async function () {
if (useRedis) {
const now = Date.now();
logger.info(`Using in-memory cache (age: ${now - lastCacheUpdateTime}ms)`);
return inMemorySocketsCache;
} else {
try {
return await io.fetchSockets();
} catch (error) {
logger.error('Error fetching sockets:', error);
return [];
}
}
}
let redisClient;
const useRedis = process.env.redis === "true";
// Background refresher that runs independently of requests
let cacheRefresher = null;
function startCacheRefresher() {
if (cacheRefresher) clearInterval(cacheRefresher);
if (useRedis) {
const {createClient} = require("redis");
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
redisClient = createClient({url: REDIS_URL});
redisClient.on("error", (error) => logger.error(`Redis error : ${error}`));
void redisClient.connect();
cacheRefresher = setInterval(async () => {
const now = Date.now();
// Only refresh if cache is stale
if (now - lastCacheUpdateTime >= CACHE_REFRESH_INTERVAL) {
logger.debug('Background refresh triggered');
try {
const startTime = performance.now();
const result = await io.fetchSockets();
inMemorySocketsCache = result;
lastCacheUpdateTime = now;
const duration = performance.now() - startTime;
logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
} catch (error) {
logger.error(`Background refresh error: ${error}`);
}
}
}, CACHE_REFRESH_INTERVAL / 2);
}
const processSocketsList = function (sockets) {
@ -28,24 +58,6 @@ const processSocketsList = function (sockets) {
return res
}
const doFetchAllSockets = async function () {
if (useRedis) {
try {
let cachedResult = await redisClient.get('fetchSocketsResult');
if (cachedResult) {
return JSON.parse(cachedResult);
}
let result = await io.fetchSockets();
let cachedString = JSON.stringify(processSocketsList(result));
await redisClient.set('fetchSocketsResult', cachedString, {EX: 5});
return result;
} catch (error) {
logger.error('Error setting value with expiration:', error);
}
}
return await io.fetchSockets();
}
const fetchSockets = async function (roomID) {
if (!io) {
return [];
@ -84,6 +96,7 @@ const createSocketIOServer = function (server, prefix) {
});
io.attachApp(server);
}
startCacheRefresher();
return io;
}
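
For reference, a minimal standalone sketch of the caching pattern this hunk introduces: readers always get the in-memory snapshot, and a timer, not the request path, is responsible for refreshing it. Names are illustrative, not from this changeset.

type Fetcher<T> = () => Promise<T[]>;

// Background-refresh cache: get() never blocks on a fetch; the timer keeps
// the snapshot at most refreshIntervalMs old.
class BackgroundCache<T> {
  private snapshot: T[] = [];
  private lastUpdate = 0;
  private timer: ReturnType<typeof setInterval> | null = null;

  constructor(private fetchAll: Fetcher<T>, private refreshIntervalMs = 5000) {}

  start(): void {
    if (this.timer) clearInterval(this.timer);
    // Poll at half the interval, refresh only once the snapshot is stale.
    this.timer = setInterval(async () => {
      const now = Date.now();
      if (now - this.lastUpdate < this.refreshIntervalMs) return;
      try {
        this.snapshot = await this.fetchAll();
        this.lastUpdate = now;
      } catch (err) {
        console.error('background refresh failed:', err);
      }
    }, this.refreshIntervalMs / 2);
  }

  get(): T[] {
    return this.snapshot; // may be up to refreshIntervalMs stale
  }
}

The trade-off matches the hunk above: readers can see a slightly stale socket list, but io.fetchSockets(), which round-trips through the Redis adapter in cluster mode, is no longer on the request path.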


@ -0,0 +1,13 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name);
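
The table is a ReplacingMergeTree keyed on (project_id, event_name, property_name), so re-registering a known property is a harmless overwrite. As a hedged illustration only (this changeset touches the DDL, not client code), writing a row from TypeScript with the official @clickhouse/client package could look like:

import { createClient } from '@clickhouse/client';

// URL and credentials are placeholders.
const ch = createClient({ url: 'http://localhost:8123' });

async function registerEventProperty(projectId: number, eventName: string, propertyName: string): Promise<void> {
  // ReplacingMergeTree(_timestamp) collapses rows sharing the same key at
  // merge time, keeping the newest _timestamp, so repeats are safe.
  await ch.insert({
    table: 'product_analytics.event_properties',
    values: [{ project_id: projectId, event_name: eventName, property_name: propertyName }],
    format: 'JSONEachRow',
  });
}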


@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
@ -654,6 +654,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
ORDER BY (project_id, event_name);
-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name);
-- The full list of properties (events and users)
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(


@ -0,0 +1,30 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif


@ -1,4 +1,4 @@
\set or_version 'v1.22.0-ee'
\set or_version 'v1.23.0-ee'
SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1


@ -0,0 +1,3 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
DROP TABLE IF EXISTS product_analytics.event_properties;


@ -0,0 +1,27 @@
\set previous_version 'v1.23.0-ee'
\set next_version 'v1.22.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif


@ -82,7 +82,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
{ stream: MediaStream; isAgent: boolean }[] | null
>([]);
const [localStream, setLocalStream] = useState<LocalStream | null>(null);
const [callObject, setCallObject] = useState<{ end: () => void } | null>(
const [callObject, setCallObject] = useState<{ end: () => void } | null | undefined>(
null,
);
@ -135,6 +135,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
}, [peerConnectionStatus]);
const addIncomeStream = (stream: MediaStream, isAgent: boolean) => {
if (!stream.active) return;
setIncomeStream((oldState) => {
if (oldState === null) return [{ stream, isAgent }];
if (
@ -149,13 +150,8 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
});
};
const removeIncomeStream = (stream: MediaStream) => {
setIncomeStream((prevState) => {
if (!prevState) return [];
return prevState.filter(
(existingStream) => existingStream.stream.id !== stream.id,
);
});
const removeIncomeStream = () => {
setIncomeStream([]);
};
function onReject() {
@ -181,7 +177,12 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream(lStream.stream);
removeIncomeStream();
},
() => {
player.assistManager.ping(AssistActionsPing.call.end, agentId);
lStream.stop.apply(lStream);
removeIncomeStream();
},
onReject,
onError,


@ -34,43 +34,40 @@ function VideoContainer({
}
const iid = setInterval(() => {
const track = stream.getVideoTracks()[0];
const settings = track?.getSettings();
const isDummyVideoTrack = settings
? settings.width === 2 ||
settings.frameRate === 0 ||
(!settings.frameRate && !settings.width)
: true;
const shouldBeEnabled = track.enabled && !isDummyVideoTrack;
if (isEnabled !== shouldBeEnabled) {
setEnabled(shouldBeEnabled);
setRemoteEnabled?.(shouldBeEnabled);
if (track) {
if (!track.enabled) {
setEnabled(false);
setRemoteEnabled?.(false);
} else {
setEnabled(true);
setRemoteEnabled?.(true);
}
} else {
setEnabled(false);
setRemoteEnabled?.(false);
}
}, 500);
return () => clearInterval(iid);
}, [stream, isEnabled]);
}, [stream]);
return (
<div
className="flex-1"
style={{
display: isEnabled ? undefined : 'none',
width: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px !important',
border: '1px solid grey',
transform: local ? 'scaleX(-1)' : undefined,
display: isEnabled ? 'block' : 'none',
}}
>
<video autoPlay ref={ref} muted={muted} style={{ height }} />
{isAgent ? (
<div
style={{
position: 'absolute',
}}
>
{t('Agent')}
</div>
) : null}
<video
autoPlay
ref={ref}
muted={muted}
style={{ height }}
/>
</div>
);
}
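
The removed branch tried to detect a placeholder ("dummy") video track from its MediaTrackSettings before trusting track.enabled; the simplified interval above checks track.enabled only, presumably because the heuristic occasionally hid real camera tracks (see the black-feed fix in the tracker-assist changelog below). The old heuristic as a standalone sketch:

// Heuristic from the removed branch: a placeholder track typically reports
// a 2px-wide frame, a zero frame rate, or no usable settings at all.
function isDummyVideoTrack(track: MediaStreamTrack | undefined): boolean {
  if (!track) return true;
  const settings = track.getSettings();
  return (
    settings.width === 2 ||
    settings.frameRate === 0 ||
    (!settings.frameRate && !settings.width)
  );
}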


@ -16,10 +16,10 @@ function ProfilerDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
const fn = profiler('call_name')(() => {
//...
}, thisArg); // thisArg is optional`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerProfiler from '@openreplay/tracker-profiler/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
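
Every plugin doc in this changeset makes the same move, so it is worth stating once: the snippets drop the per-caller constructor in favor of a shared singleton that is imported and configured. In condensed form:

// Before (removed from these docs):
//   import OpenReplay from '@openreplay/tracker';
//   const tracker = new OpenReplay({ projectKey: 'PROJECT_KEY' });

// After (as the updated snippets show):
import { tracker } from '@openreplay/tracker';

tracker.configure({
  projectKey: 'PROJECT_KEY', // placeholder
});
tracker.start();

Plugins keep the same tracker.use(...) shape under both styles.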


@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';
function AssistNpm(props) {
const { t } = useTranslation();
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerAssist from '@openreplay/tracker-assist';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}',
});
tracker.start()
tracker.use(trackerAssist(options)); // check the list of available options below`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerAssist from '@openreplay/tracker-assist/cjs';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}'
});
const assist = tracker.use(trackerAssist(options)); // check the list of available options below


@ -14,19 +14,20 @@ function GraphQLDoc() {
const projectKey = siteId
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerGraphQL from '@openreplay/tracker-graphql';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
//...
export const recordGraphQL = tracker.use(trackerGraphQL());`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...


@ -15,20 +15,21 @@ function MobxDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const mobxUsage = `import OpenReplay from '@openreplay/tracker';
const mobxUsage = `import { tracker } from '@openreplay/tracker';
import trackerMobX from '@openreplay/tracker-mobx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below
tracker.start();
`;
const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerMobX from '@openreplay/tracker-mobx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below


@ -16,10 +16,10 @@ function NgRxDoc() {
: sites[0]?.projectKey;
const usage = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerNgRx from '@openreplay/tracker-ngrx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
export class AppModule {}`;
const usageCjs = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...


@ -17,10 +17,10 @@ function PiniaDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()


@ -16,10 +16,10 @@ function ReduxDoc() {
: sites[0]?.projectKey;
const usage = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerRedux from '@openreplay/tracker-redux';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,11 @@ const store = createStore(
applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
);`;
const usageCjs = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerRedux from '@openreplay/tracker-redux/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...


@ -16,10 +16,10 @@ function VueDoc() {
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,11 @@ const store = new Vuex.Store({
plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
});`;
const usageCjs = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerVuex from '@openreplay/tracker-vuex/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...


@ -16,11 +16,10 @@ function ZustandDoc(props) {
: sites[0]?.projectKey;
const usage = `import create from "zustand";
import Tracker from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});
@ -43,11 +42,12 @@ const useBearStore = create(
)
`;
const usageCjs = `import create from "zustand";
import Tracker from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});


@ -11,6 +11,7 @@ import { useTranslation } from 'react-i18next';
const initTableProps = [
{
title: <span className="font-medium">Series</span>,
_pureTitle: 'Series',
dataIndex: 'seriesName',
key: 'seriesName',
sorter: (a, b) => a.seriesName.localeCompare(b.seriesName),
@ -18,6 +19,7 @@ const initTableProps = [
},
{
title: <span className="font-medium">Avg.</span>,
_pureTitle: 'Avg.',
dataIndex: 'average',
key: 'average',
sorter: (a, b) => a.average - b.average,
@ -94,6 +96,8 @@ function WidgetDatatable(props: Props) {
tableCols.push({
title: <span className="font-medium">{name}</span>,
dataIndex: `${name}_${i}`,
// @ts-ignore
_pureTitle: name,
key: `${name}_${i}`,
sorter: (a, b) => a[`${name}_${i}`] - b[`${name}_${i}`],
});


@ -1,52 +1,80 @@
import React, { useEffect } from 'react';
import React, { useEffect, useState } from 'react';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import ReCAPTCHA from 'react-google-recaptcha';
import { Form, Input, Loader, Icon, Message } from 'UI';
import { Button } from 'antd';
import { validatePassword } from 'App/validate';
import { PASSWORD_POLICY } from 'App/constants';
import stl from './forgotPassword.module.css';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
const recaptchaRef = React.createRef();
const ERROR_DONT_MATCH = (t) => t("Passwords don't match.");
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;
interface Props {
params: any;
}
function CreatePassword(props: Props) {
function CreatePassword(props: Props & WithCaptchaProps) {
const { t } = useTranslation();
const { params } = props;
const { userStore } = useStore();
const { loading } = userStore;
const { resetPassword } = userStore;
const [error, setError] = React.useState<string | null>(null);
const [validationError, setValidationError] = React.useState<string | null>(
null,
);
const [updated, setUpdated] = React.useState(false);
const [passwordRepeat, setPasswordRepeat] = React.useState('');
const [password, setPassword] = React.useState('');
const [error, setError] = useState<string | null>(null);
const [validationError, setValidationError] = useState<string | null>(null);
const [updated, setUpdated] = useState(false);
const [passwordRepeat, setPasswordRepeat] = useState('');
const [password, setPassword] = useState('');
const pass = params.get('pass');
const invitation = params.get('invitation');
const handleSubmit = () => {
if (!validatePassword(password)) {
const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;
const handleSubmit = (token?: string) => {
if (!validatePassword(password) || !token) {
return;
}
void resetPassword({ invitation, pass, password });
resetPassword({
invitation,
pass,
password,
'g-recaptcha-response': token
})
.then(() => {
setUpdated(true);
})
.catch((err) => {
setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
});
};
const onSubmit = (e: any) => {
e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
const onSubmit = () => {
// Validate before attempting captcha verification
if (!validatePassword(password) || password !== passwordRepeat) {
setValidationError(
password !== passwordRepeat
? ERROR_DONT_MATCH(t)
: PASSWORD_POLICY(t)
);
return;
}
// Reset any previous errors
setError(null);
setValidationError(null);
submitWithCaptcha({ pass, invitation, password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error) => {
console.error('Captcha verification failed:', error);
// The component will handle showing appropriate messages
});
};
const write = (e: any) => {
@ -63,7 +91,7 @@ function CreatePassword(props: Props) {
} else {
setValidationError(null);
}
}, [passwordRepeat, password]);
}, [passwordRepeat, password, t]);
return (
<Form
@ -73,19 +101,8 @@ function CreatePassword(props: Props) {
>
{!error && (
<>
<Loader loading={loading}>
<Loader loading={loading || isVerifyingCaptcha}>
<div data-hidden={updated} className="w-full">
{CAPTCHA_ENABLED && (
<div className={stl.recaptcha}>
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}
<Form.Field>
<label>{t('New password')}</label>
<Input
@ -132,10 +149,15 @@ function CreatePassword(props: Props) {
<Button
htmlType="submit"
type="primary"
loading={loading}
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha || validationError !== null}
className="w-full mt-4"
>
{t('Create')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Create')}
</Button>
)}
</>
@ -153,4 +175,4 @@ function CreatePassword(props: Props) {
);
}
export default observer(CreatePassword);
export default withCaptcha(observer(CreatePassword));


@ -1,24 +1,26 @@
import React from 'react';
import React, { useState } from 'react';
import { Loader, Icon } from 'UI';
import ReCAPTCHA from 'react-google-recaptcha';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import { Form, Input, Button, Typography } from 'antd';
import { SquareArrowOutUpRight } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
function ResetPasswordRequest() {
interface Props {
}
function ResetPasswordRequest(props: Props & WithCaptchaProps) {
const { t } = useTranslation();
const { userStore } = useStore();
const { loading } = userStore;
const { requestResetPassword } = userStore;
const recaptchaRef = React.createRef();
const [requested, setRequested] = React.useState(false);
const [email, setEmail] = React.useState('');
const [error, setError] = React.useState(null);
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;
const [smtpError, setSmtpError] = React.useState<boolean>(false);
const [requested, setRequested] = useState(false);
const [email, setEmail] = useState('');
const [error, setError] = useState(null);
const [smtpError, setSmtpError] = useState<boolean>(false);
const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;
const write = (e: any) => {
const { name, value } = e.target;
@ -26,21 +28,22 @@ function ResetPasswordRequest() {
};
const onSubmit = () => {
// e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
// Validation check
if (!email || email.trim() === '') {
return;
}
submitWithCaptcha({ email: email.trim() })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha verification failed:', error);
});
};
const handleSubmit = (token?: any) => {
if (
CAPTCHA_ENABLED &&
recaptchaRef.current &&
(token === null || token === undefined)
)
return;
const handleSubmit = (token?: string) => {
if (!token) return;
setError(null);
requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token })
@ -50,29 +53,21 @@ function ResetPasswordRequest() {
}
setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
})
.finally(() => {
setRequested(true);
});
};
return (
<Form
onFinish={onSubmit}
style={{ minWidth: '50%' }}
className="flex flex-col"
>
<Loader loading={false}>
{CAPTCHA_ENABLED && (
<div className="flex justify-center">
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
data-hidden={requested}
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}
<Loader loading={loading || isVerifyingCaptcha}>
{!requested && (
<>
<Form.Item>
@ -92,10 +87,14 @@ function ResetPasswordRequest() {
<Button
type="primary"
htmlType="submit"
loading={loading}
disabled={loading}
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
>
{t('Email Password Reset Link')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Email Password Reset Link')}
</Button>
</>
)}
@ -146,4 +145,4 @@ function ResetPasswordRequest() {
);
}
export default observer(ResetPasswordRequest);
export default withCaptcha(observer(ResetPasswordRequest));


@ -1,23 +1,18 @@
import withPageTitle from 'HOCs/withPageTitle';
import cn from 'classnames';
import React, { useEffect, useMemo, useRef, useState } from 'react';
// Consider using a different approach for titles in functional components
import ReCAPTCHA from 'react-google-recaptcha';
import React, { useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
import { toast } from 'react-toastify';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import { forgotPassword, signup } from 'App/routes';
import { Icon, Link, Loader, Tooltip } from 'UI';
import { Icon, Link, Loader } from 'UI';
import { Button, Form, Input } from 'antd';
import Copyright from 'Shared/Copyright';
import stl from './login.module.css';
import { useTranslation } from 'react-i18next';
import { useStore } from 'App/mstore';
import LanguageSwitcher from '../LanguageSwitcher';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
import SSOLogin from './SSOLogin';
const FORGOT_PASSWORD = forgotPassword();
const SIGNUP_ROUTE = signup();
@ -26,14 +21,15 @@ interface LoginProps {
location: Location;
}
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
function Login({ location }: LoginProps) {
function Login({
location,
submitWithCaptcha,
isVerifyingCaptcha,
resetCaptcha,
}: LoginProps & WithCaptchaProps) {
const { t } = useTranslation();
const [email, setEmail] = useState('');
const [password, setPassword] = useState('');
// const CAPTCHA_ENABLED = useMemo(() => window.env.CAPTCHA_ENABLED === 'true', []);
const recaptchaRef = useRef<ReCAPTCHA>(null);
const { loginStore, userStore } = useStore();
const { errors } = userStore.loginRequest;
const { loading } = loginStore;
@ -49,7 +45,6 @@ function Login({ location }: LoginProps) {
}, [authDetails]);
useEffect(() => {
// void fetchTenants();
const jwt = params.get('jwt');
const spotJwt = params.get('spotJwt');
if (spotJwt) {
@ -108,32 +103,36 @@ function Login({ location }: LoginProps) {
if (resp) {
userStore.syntheticLogin(resp);
setJwt({ jwt: resp.jwt, spotJwt: resp.spotJwt ?? null });
handleSpotLogin(resp.spotJwt);
if (resp.spotJwt) {
handleSpotLogin(resp.spotJwt);
}
}
})
.catch((e) => {
userStore.syntheticLoginError(e);
resetCaptcha();
});
};
const onSubmit = () => {
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
if (!email || !password) {
return;
}
};
const ssoLink =
window !== window.top
? `${window.location.origin}/api/sso/saml2?iFrame=true`
: `${window.location.origin}/api/sso/saml2`;
submitWithCaptcha({ email: email.trim(), password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha error:', error);
});
};
return (
<div className="flex items-center justify-center h-screen">
<div className="flex flex-col items-center">
<div className="m-10 ">
<img src="/assets/logo.svg" width={200} />
<img src="/assets/logo.svg" width={200} alt="Company Logo" />
</div>
<div className="border rounded-lg bg-white shadow-sm">
<h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">
@ -145,15 +144,7 @@ function Login({ location }: LoginProps) {
className={cn('flex items-center justify-center flex-col')}
style={{ width: '350px' }}
>
<Loader loading={loading}>
{CAPTCHA_ENABLED && (
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={window.env.CAPTCHA_SITE_KEY}
onChange={(token) => handleSubmit(token)}
/>
)}
<Loader loading={loading || isVerifyingCaptcha}>
<div style={{ width: '350px' }} className="px-8">
<Form.Item>
<label>{t('Email Address')}</label>
@ -186,8 +177,8 @@ function Login({ location }: LoginProps) {
</Loader>
{errors && errors.length ? (
<div className="px-8 my-2 w-full">
{errors.map((error) => (
<div className="flex items-center bg-red-lightest rounded p-3">
{errors.map((error, index) => (
<div key={index} className="flex items-center bg-red-lightest rounded p-3">
<Icon name="info" color="red" size="20" />
<span className="color-red ml-2">
{error}
@ -204,8 +195,14 @@ function Login({ location }: LoginProps) {
className="mt-2 w-full text-center rounded-lg"
type="primary"
htmlType="submit"
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
>
{t('Login')}
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Logging in...')
: t('Login')}
</Button>
<div className="my-8 flex justify-center items-center flex-wrap">
@ -219,63 +216,12 @@ function Login({ location }: LoginProps) {
</div>
</Form>
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
{authDetails.sso ? (
<a href={ssoLink} rel="noopener noreferrer">
<Button type="text" htmlType="submit">
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</a>
) : (
<Tooltip
delay={0}
title={
<div className="text-center">
{authDetails.edition === 'ee' ? (
<span>
{t('SSO has not been configured.')}
<br />
{t('Please reach out to your admin.')}
</span>
) : (
ENTERPRISE_REQUEIRED(t)
)}
</div>
}
placement="top"
>
<Button
type="text"
htmlType="submit"
className="pointer-events-none opacity-30"
>
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</Tooltip>
)}
</div>
</div>
<div
className={cn('flex items-center w-96 justify-center my-8', {
'!hidden': !authDetails?.enforceSSO,
})}
>
<a href={ssoLink} rel="noopener noreferrer">
<Button type="primary">
{`${t('Login with SSO')} ${
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
}`}
</Button>
</a>
<SSOLogin authDetails={authDetails} />
</div>
{authDetails?.enforceSSO && (
<SSOLogin authDetails={authDetails} enforceSSO={true} />
)}
</div>
</div>
@ -287,4 +233,6 @@ function Login({ location }: LoginProps) {
);
}
export default withPageTitle('Login - OpenReplay')(observer(Login));
export default withPageTitle('Login - OpenReplay')(
withCaptcha(observer(Login))
);


@ -0,0 +1,78 @@
import React from 'react';
import cn from 'classnames';
import { Button, Tooltip } from 'antd';
import { useTranslation } from 'react-i18next';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import stl from './login.module.css';
import { useStore } from 'App/mstore';
interface SSOLoginProps {
authDetails: any;
enforceSSO?: boolean;
}
const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
const { userStore } = useStore();
const { t } = useTranslation();
const { isEnterprise } = userStore;
const getSSOLink = () =>
window !== window.top
? `${window.location.origin}/api/sso/saml2?iFrame=true`
: `${window.location.origin}/api/sso/saml2`;
const ssoLink = getSSOLink();
const ssoButtonText = `${t('Login with SSO')} ${authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''}`;
if (enforceSSO) {
return (
<div className={cn('flex items-center w-96 justify-center my-8')}>
<a href={ssoLink} rel="noopener noreferrer">
<Button type="primary">{ssoButtonText}</Button>
</a>
</div>
);
}
return (
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
{authDetails.sso ? (
<a href={ssoLink} rel="noopener noreferrer">
<Button type="text" htmlType="submit">
{ssoButtonText}
</Button>
</a>
) : (
<Tooltip
title={
<div className="text-center">
{isEnterprise ? (
<span>
{t('SSO has not been configured.')}
<br />
{t('Please reach out to your admin.')}
</span>
) : (
ENTERPRISE_REQUEIRED(t)
)}
</div>
}
placement="top"
>
<span className="cursor-not-allowed">
<Button
type="text"
htmlType="submit"
disabled={true}
>
{ssoButtonText}
</Button>
</span>
</Tooltip>
)}
</div>
);
};
export default SSOLogin;


@ -1,16 +1,14 @@
import React from 'react';
import { Redirect, Route, RouteComponentProps, Switch } from 'react-router';
import { withRouter } from 'react-router-dom';
import { OB_TABS, onboarding as onboardingRoute, withSiteId } from 'App/routes';
import { Icon } from 'UI';
import IdentifyUsersTab from './components/IdentifyUsersTab';
import InstallOpenReplayTab from './components/InstallOpenReplayTab';
import IntegrationsTab from './components/IntegrationsTab';
import ManageUsersTab from './components/ManageUsersTab';
import SideMenu from './components/SideMenu';
import { useTranslation } from 'react-i18next';
import { Smartphone, AppWindow } from 'lucide-react';
interface Props {
match: {
@ -33,7 +31,7 @@ function Onboarding(props: Props) {
{
label: (
<div className="font-semibold flex gap-2 items-center">
<Icon name="browser/browser" size={16} />
<AppWindow size={16} />
&nbsp;{t('Web')}
</div>
),
@ -42,7 +40,7 @@ function Onboarding(props: Props) {
{
label: (
<div className="font-semibold flex gap-2 items-center">
<Icon name="mobile" size={16} />
<Smartphone size={16} />
&nbsp;{t('Mobile')}
</div>
),


@ -130,18 +130,20 @@ function IdentifyUsersTab(props: Props) {
'To identify users through metadata, you will have to explicitly specify your user metadata so it can be injected during sessions. Follow the below steps',
)}
</p>
<div className="flex items-start">
<div className="flex items-center gap-2 mb-2">
<CircleNumber text="1" />
<MetadataList />
</div>
<div className="my-6" />
<div className="flex items-start">
<CircleNumber text="2" />
<div className="pt-1 w-full">
<div>
<CircleNumber text="2" />
<span className="font-bold">
{t('Inject metadata when recording sessions')}
</span>
</div>
<div className="pt-1 w-full">
<div className="my-2">
{t('Use the')}&nbsp;
<span className="highlight-blue">setMetadata</span>{' '}


@ -55,16 +55,14 @@ function MetadataList() {
<Button type="default" onClick={() => openModal()}>
{t('Add Metadata')}
</Button>
<div className="flex ml-2">
{fields.map((f, index) => (
<TagBadge
key={index}
text={f.key}
onRemove={() => removeMetadata(f)}
outline
/>
))}
</div>
{fields.map((f, index) => (
<TagBadge
key={index}
text={f.key}
onRemove={() => removeMetadata(f)}
outline
/>
))}
</div>
);
}


@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';
const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});
tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});


@ -19,7 +19,7 @@ const AUTOREFRESH_INTERVAL = 2 * 60 * 1000;
const PER_PAGE = 10;
function LiveSessionList() {
const { searchStoreLive, sessionStore, customFieldStore } = useStore();
const { searchStoreLive, sessionStore, customFieldStore, projectsStore } = useStore();
const filter = searchStoreLive.instance;
const list = sessionStore.liveSessions;
const { totalLiveSessions } = sessionStore;
@ -72,6 +72,12 @@ function LiveSessionList() {
void searchStoreLive.fetchSessions();
};
useEffect(() => {
if (projectsStore.activeSiteId) {
void searchStoreLive.fetchSessions(true);
}
}, [projectsStore.activeSiteId])
const onUserClick = (userId: string, userAnonymousId: string) => {
if (userId) {
searchStoreLive.addFilterByKeyAndValue(FilterKey.USERID, userId);
@ -98,7 +104,7 @@ function LiveSessionList() {
<div>
<div className="bg-white py-3 rounded-lg border shadow-sm">
<div className="flex mb-4 pb-2 px-3 justify-between items-center border-b border-b-gray-lighter">
<LiveSessionReloadButton onClick={refetch} />
<LiveSessionReloadButton />
<div className="flex items-center">
<div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium">{t('Sort By')}</span>


@ -4,15 +4,11 @@ import { observer } from 'mobx-react-lite';
import ReloadButton from '../ReloadButton';
import { useTranslation } from 'react-i18next';
interface Props {
onClick: () => void;
}
function LiveSessionReloadButton(props: Props) {
function LiveSessionReloadButton() {
const { t } = useTranslation();
const { sessionStore } = useStore();
const { onClick } = props;
const loading = sessionStore.loadingLiveSessions;
const { searchStoreLive } = useStore();
const onClick = searchStoreLive.fetchSessions
const loading = searchStoreLive.loading;
return (
<ReloadButton label={t('Refresh')} buttonSize={'small'} iconSize={14} loading={loading} onClick={onClick} className="cursor-pointer" />
);


@ -18,6 +18,7 @@ export default function ReloadButton(props: Props) {
<Button
type="default"
size={buttonSize}
loading={loading}
onClick={onClick}
icon={<SyncOutlined style={{ fontSize: iconSize }} />}
>


@ -5,17 +5,18 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';
const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});
tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});


@ -31,7 +31,7 @@ const Input = React.forwardRef((props: Props, ref: any) => {
{icon && (
<Icon
name={icon}
className="absolute top-0 bottom-0 my-auto ml-4"
className="absolute top-0 bottom-0 my-auto ml-4 z-10"
size="14"
/>
)}


@ -28,18 +28,18 @@ export const checkValues = (key: any, value: any) => {
};
export const filterMap = ({
category,
value,
key,
operator,
sourceOperator,
source,
custom,
isEvent,
filters,
sort,
order
}: any) => ({
category,
value,
key,
operator,
sourceOperator,
source,
custom,
isEvent,
filters,
sort,
order
}: any) => ({
value: checkValues(key, value),
custom,
type: category === FilterCategory.METADATA ? FilterKey.METADATA : key,
@ -254,7 +254,7 @@ class SearchStore {
this.savedSearch = new SavedSearch({});
sessionStore.clearList();
void this.fetchSessions(true);
// void this.fetchSessions(true);
}
async checkForLatestSessionCount(): Promise<void> {


@ -75,6 +75,8 @@ class SearchStoreLive {
loadingFilterSearch = false;
loading = false;
constructor() {
makeAutoObservable(this);
@ -242,11 +244,25 @@ class SearchStoreLive {
});
};
async fetchSessions() {
await sessionStore.fetchLiveSessions({
...this.instance.toSearch(),
page: this.currentPage,
});
setLoading = (val: boolean) => {
this.loading = val;
}
fetchSessions = async (force?: boolean) => {
if (!force && this.loading) {
return;
}
this.setLoading(true)
try {
await sessionStore.fetchLiveSessions({
...this.instance.toSearch(),
page: this.currentPage,
});
} catch (e) {
console.error('Error fetching sessions:', e);
} finally {
this.setLoading(false)
}
}
}


@ -114,7 +114,9 @@ class UserStore {
get isEnterprise() {
return (
this.account?.edition === 'ee' ||
this.authStore.authDetails?.edition === 'ee'
this.account?.edition === 'msaas' ||
this.authStore.authDetails?.edition === 'ee' ||
this.authStore.authDetails?.edition === 'msaas'
);
}
@ -245,8 +247,8 @@ class UserStore {
const errStr = err.errors[0]
? err.errors[0].includes('already exists')
? this.t(
"This email is already linked to an account or team on OpenReplay and can't be used again.",
)
"This email is already linked to an account or team on OpenReplay and can't be used again.",
)
: err.errors[0]
: this.t('Error saving user');
toast.error(errStr);
@ -416,9 +418,9 @@ class UserStore {
this.jwt = data.jwt;
this.spotJwt = data.spotJwt;
});
} catch (error) {
toast.error(this.t('Error resetting your password; please try again'));
return error.response;
} catch (e) {
toast.error(e.message || this.t('Error resetting your password; please try again'));
throw e;
} finally {
runInAction(() => {
this.loading = false;
@ -663,14 +665,14 @@ class AuthStore {
{
key: 'authDetails',
serialize: (ad) => {
delete ad.edition;
// delete ad.edition;
return Object.keys(ad).length > 0
? JSON.stringify(ad)
: JSON.stringify({});
},
deserialize: (json) => {
const ad = JSON.parse(json);
delete ad.edition;
// delete ad.edition;
return ad;
},
},


@ -150,10 +150,10 @@ export default class MessageLoader {
});
const sortedMsgs = msgs
// .sort((m1, m2) => m1.time - m2.time);
// .sort((m1, m2) => m1.time - m2.time)
.sort(brokenDomSorter)
.sort(sortIframes);
if (brokenMessages > 0) {
console.warn(
'Broken timestamp messages',
@ -383,7 +383,6 @@ const DOMMessages = [
MType.CreateElementNode,
MType.CreateTextNode,
MType.MoveNode,
MType.RemoveNode,
MType.CreateIFrameDocument,
];
@ -395,6 +394,11 @@ function brokenDomSorter(m1: PlayerMsg, m2: PlayerMsg) {
if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
return 1;
if (m1.tp === MType.RemoveNode)
return 1;
if (m2.tp === MType.RemoveNode)
return -1;
const m1IsDOM = DOMMessages.includes(m1.tp);
const m2IsDOM = DOMMessages.includes(m2.tp);
if (m1IsDOM && m2IsDOM) {
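
The two new guards sink RemoveNode messages to the end of any equal-position batch (note RemoveNode is also dropped from the DOMMessages list above), so a removal can no longer be applied before the message that creates its target. A toy run of the rule, with the message shapes reduced to the essentials:

// Toy version of the added guards.
const REMOVE = 'RemoveNode';
type Msg = { tp: string; id: number };

function removeNodeLast(m1: Msg, m2: Msg): number {
  if (m1.tp === REMOVE) return 1;  // m1 sinks
  if (m2.tp === REMOVE) return -1; // m2 sinks
  return 0; // other messages keep their relative order (Array.sort is stable)
}

const batch: Msg[] = [
  { tp: REMOVE, id: 5 },
  { tp: 'CreateElementNode', id: 5 },
];
batch.sort(removeNodeLast);
// -> CreateElementNode(5) now precedes RemoveNode(5)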


@ -185,11 +185,14 @@ export default class AssistManager {
const socket: Socket = (this.socket = io(urlObject.origin, {
withCredentials: true,
multiplex: true,
transports: ['websocket'],
transports: ['polling', 'websocket'],
path: '/ws-assist/socket',
auth: {
token: agentToken,
},
extraHeaders: {
sessionId: this.session.sessionId,
},
query: {
peerId: this.peerID,
projectId,
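
Together with the ingress change further down, this gives the load balancer something to route on before the websocket upgrade: the first request is plain HTTP polling that carries a sessionId header. A condensed sketch of the resulting socket.io-client options (URL, token, and IDs are placeholders):

import { io, Socket } from 'socket.io-client';

const socket: Socket = io('https://openreplay.example.com', {
  path: '/ws-assist/socket',
  transports: ['polling', 'websocket'], // polling first, then upgrade
  withCredentials: true,
  auth: { token: 'AGENT_TOKEN' },
  // In browsers extraHeaders only apply to the HTTP (polling) requests,
  // which is exactly where sticky routing needs them.
  extraHeaders: { sessionId: 'SESSION_ID' },
  query: { peerId: 'PEER_ID', projectId: 1 },
});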


@ -185,8 +185,7 @@ export default class Call {
pc.ontrack = (event) => {
const stream = event.streams[0];
if (stream && !this.videoStreams[remotePeerId]) {
const clonnedStream = stream.clone();
this.videoStreams[remotePeerId] = clonnedStream.getVideoTracks()[0];
this.videoStreams[remotePeerId] = stream.getVideoTracks()[0];
if (this.store.get().calling !== CallingState.OnCall) {
this.store.update({ calling: CallingState.OnCall });
}
@ -305,22 +304,18 @@ export default class Call {
}
try {
// if the connection is not established yet, then set remoteDescription to peer
if (!pc.localDescription) {
await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
if (isAgent) {
this.socket.emit('WEBRTC_AGENT_CALL', {
from: this.callID,
answer,
toAgentId: getSocketIdByCallId(fromCallId),
type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
});
} else {
this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
}
await pc.setRemoteDescription(new RTCSessionDescription(data.offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
if (isAgent) {
this.socket.emit('WEBRTC_AGENT_CALL', {
from: this.callID,
answer,
toAgentId: getSocketIdByCallId(fromCallId),
type: WEBRTC_CALL_AGENT_EVENT_TYPES.ANSWER,
});
} else {
logger.warn('Skipping setRemoteDescription: Already in stable state');
this.socket.emit('webrtc_call_answer', { from: fromCallId, answer });
}
} catch (e) {
logger.error('Error setting remote description from answer', e);
@ -388,13 +383,13 @@ export default class Call {
private handleCallEnd() {
// If the call is not completed, then call onCallEnd
if (this.store.get().calling !== CallingState.NoCall) {
this.callArgs && this.callArgs.onCallEnd();
this.callArgs && this.callArgs.onRemoteCallEnd();
}
// change state to NoCall
this.store.update({ calling: CallingState.NoCall });
// Close all created RTCPeerConnection
Object.values(this.connections).forEach((pc) => pc.close());
this.callArgs?.onCallEnd();
this.callArgs?.onRemoteCallEnd();
// Clear connections
this.connections = {};
this.callArgs = null;
@ -414,7 +409,7 @@ export default class Call {
// Close all connections and reset callArgs
Object.values(this.connections).forEach((pc) => pc.close());
this.connections = {};
this.callArgs?.onCallEnd();
this.callArgs?.onRemoteCallEnd();
this.store.update({ calling: CallingState.NoCall });
this.callArgs = null;
} else {
@ -443,7 +438,8 @@ export default class Call {
private callArgs: {
localStream: LocalStream;
onStream: (s: MediaStream, isAgent: boolean) => void;
onCallEnd: () => void;
onRemoteCallEnd: () => void;
onLocalCallEnd: () => void;
onReject: () => void;
onError?: (arg?: any) => void;
} | null = null;
@ -451,14 +447,16 @@ export default class Call {
setCallArgs(
localStream: LocalStream,
onStream: (s: MediaStream, isAgent: boolean) => void,
onCallEnd: () => void,
onRemoteCallEnd: () => void,
onLocalCallEnd: () => void,
onReject: () => void,
onError?: (e?: any) => void,
) {
this.callArgs = {
localStream,
onStream,
onCallEnd,
onRemoteCallEnd,
onLocalCallEnd,
onReject,
onError,
};
@ -549,7 +547,7 @@ export default class Call {
void this.initiateCallEnd();
Object.values(this.connections).forEach((pc) => pc.close());
this.connections = {};
this.callArgs?.onCallEnd();
this.callArgs?.onLocalCallEnd();
}
}
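
The net effect of this file's changes is splitting the old single onCallEnd callback in two, which is what lets AssistActions above register distinct cleanup for the two exits. The resulting contract, restated (LocalStream is stubbed here; the real type comes from the player package):

type LocalStream = { stop: () => void }; // stub for illustration

interface CallArgs {
  localStream: LocalStream;
  onStream: (s: MediaStream, isAgent: boolean) => void;
  onRemoteCallEnd: () => void; // the remote side ended the call
  onLocalCallEnd: () => void;  // this agent ended it via endCall()
  onReject: () => void;
  onError?: (e?: unknown) => void;
}

Previously both paths fired the same onCallEnd, so the UI could not tell a local hang-up from a remote one.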


@ -138,26 +138,9 @@ export default class UserService {
}
async resetPassword(data: any) {
try {
const response = await this.client.post('/password/reset', data);
const responseData = await response.json();
if (responseData.errors) {
throw new Error(
responseData.errors[0] || 'An unexpected error occurred.',
);
}
return responseData || {};
} catch (error: any) {
if (error.response) {
const errorData = await error.response.json();
const errorMessage = errorData.errors
? errorData.errors[0]
: 'An unexpected error occurred.';
throw new Error(errorMessage);
}
throw new Error('An unexpected error occurred.');
}
const response = await this.client.post('/password/reset', data);
const responseData = await response.json();
return responseData || {};
}
async requestResetPassword(data: any) {


@ -597,8 +597,7 @@ function saveAsFile(blob: Blob, filename: string) {
}
export function exportAntCsv(tableColumns, tableData, filename = 'table.csv') {
console.log(tableColumns, tableData);
const headers = tableColumns.map((col) => col.title).join(',');
const headers = tableColumns.map((col) => col._pureTitle).join(',');
const rows = tableData.map((row) =>
tableColumns
.map((col) => {
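
This is the consumer of the _pureTitle fields added in the WidgetDatatable hunk earlier: antd column titles are React nodes, so serializing col.title into CSV would produce "[object Object]". A sketch of the header path with a defensive fallback (the fallback is an illustration, not part of this diff):

import type { ReactNode } from 'react';

interface ExportColumn {
  title: ReactNode;    // what antd renders
  _pureTitle?: string; // plain-text twin used for CSV export
  dataIndex: string;
}

function csvHeaders(columns: ExportColumn[]): string {
  // Fall back to dataIndex when a column was built without _pureTitle.
  return columns.map((col) => col._pureTitle ?? col.dataIndex).join(',');
}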


@ -0,0 +1,219 @@
import React, { useState, useRef, ComponentType, ReactNode, useCallback, useEffect, useLayoutEffect } from 'react';
import ReCAPTCHA from 'react-google-recaptcha';
import { toast } from "react-toastify";
// Define a more specific type for submission data
export interface SubmissionData {
[key: string]: any;
}
export interface WithCaptchaProps {
submitWithCaptcha: (data: SubmissionData) => Promise<any>;
hasCaptchaError: boolean;
isVerifyingCaptcha: boolean;
resetCaptcha: () => void;
}
export interface WithCaptchaOptions {
position?: 'visible' | 'hidden';
errorMessage?: string;
theme?: 'light' | 'dark';
size?: 'normal' | 'compact' | 'invisible';
}
// Safely get environment variables with fallbacks
const getCaptchaConfig = () => {
const enabled = typeof window !== 'undefined' &&
window.env?.CAPTCHA_ENABLED === 'true';
const siteKey = typeof window !== 'undefined' ?
window.env?.CAPTCHA_SITE_KEY || '' : '';
return { enabled, siteKey };
};
/**
* Higher-Order Component that adds reCAPTCHA functionality to a form component
*
* @param WrappedComponent The component to wrap with CAPTCHA functionality
* @param options Configuration options for the CAPTCHA behavior
* @returns A new component with CAPTCHA capabilities
*/
const withCaptcha = <P extends object>(
WrappedComponent: ComponentType<P & WithCaptchaProps>,
options: WithCaptchaOptions = {}
): React.FC<P> => {
// Default options
const {
position = 'hidden',
errorMessage = 'Please complete the CAPTCHA verification',
theme = 'light',
size = 'invisible'
} = options;
const WithCaptchaComponent: React.FC<P> = (props: P) => {
const { enabled: CAPTCHA_ENABLED, siteKey: CAPTCHA_SITE_KEY } = getCaptchaConfig();
const [captchaToken, setCaptchaToken] = useState<string | null>(null);
const [isVerifyingCaptcha, setIsVerifyingCaptcha] = useState<boolean>(false);
const [tokenExpired, setTokenExpired] = useState<boolean>(false);
const recaptchaRef = useRef<ReCAPTCHA>(null);
// Reset token when expired
useEffect(() => {
if (tokenExpired) {
setCaptchaToken(null);
setTokenExpired(false);
}
}, [tokenExpired]);
// Handle token expiration
const onCaptchaExpired = useCallback(() => {
setTokenExpired(true);
if (CAPTCHA_ENABLED) {
toast.warning('CAPTCHA verification expired. Please verify again.');
}
}, [CAPTCHA_ENABLED]);
// Handle token change
let onCaptchaChange = (token: string | null) => {
console.log('Standard captcha callback received token:', !!token);
setCaptchaToken(token);
setTokenExpired(false);
};
// Reset captcha manually
const resetCaptcha = useCallback(() => {
recaptchaRef.current?.reset();
setCaptchaToken(null);
}, []);
// Submit with captcha verification
const submitWithCaptcha = useCallback(
(data: SubmissionData): Promise<any> => {
return new Promise((resolve, reject) => {
if (!CAPTCHA_ENABLED) {
// CAPTCHA not enabled, resolve with original data
resolve(data);
return;
}
setIsVerifyingCaptcha(true);
// Special handling for invisible reCAPTCHA
if (size === 'invisible') {
// Create a direct token handler function
const handleToken = (receivedToken: string | null) => {
console.log('reCAPTCHA token received:', !!receivedToken);
if (receivedToken) {
// We have a token, resolve the promise
const dataWithCaptcha = {
...data,
'g-recaptcha-response': receivedToken
};
resolve(dataWithCaptcha);
// Reset for next use
setTimeout(() => {
recaptchaRef.current?.reset();
setIsVerifyingCaptcha(false);
}, 100);
}
};
// Set up a callback directly on the reCAPTCHA ref
if (recaptchaRef.current) {
console.log('Executing invisible reCAPTCHA');
// Execute the reCAPTCHA challenge
recaptchaRef.current.executeAsync()
.then((token: string | null) => {
handleToken(token);
})
.catch((error: any) => {
console.error('reCAPTCHA execution failed:', error);
setIsVerifyingCaptcha(false);
reject(new Error('CAPTCHA verification failed'));
});
// Set a timeout in case the promise doesn't resolve
setTimeout(() => {
if (isVerifyingCaptcha) {
console.log('reCAPTCHA verification timed out');
setIsVerifyingCaptcha(false);
toast.error(errorMessage || 'Verification timed out. Please try again.');
reject(new Error('CAPTCHA verification timeout'));
}
}, 5000);
} else {
console.error('reCAPTCHA ref not available');
setIsVerifyingCaptcha(false);
reject(new Error('CAPTCHA component not initialized'));
}
} else if (captchaToken) {
// Standard reCAPTCHA with token already available
const dataWithCaptcha = {
...data,
'g-recaptcha-response': captchaToken
};
resolve(dataWithCaptcha);
recaptchaRef.current?.reset();
setCaptchaToken(null);
setIsVerifyingCaptcha(false);
} else {
// Standard reCAPTCHA but no token yet
toast.error(errorMessage || 'Please complete the CAPTCHA verification');
reject(new Error('CAPTCHA verification required'));
setIsVerifyingCaptcha(false);
}
});
},
[CAPTCHA_ENABLED, captchaToken, errorMessage, size, isVerifyingCaptcha]
);
const hasCaptchaError = !captchaToken && CAPTCHA_ENABLED === true;
return (
<>
{CAPTCHA_ENABLED && (
<div className={position === 'hidden' ? 'sr-only' : 'mb-4'}>
<ReCAPTCHA
ref={recaptchaRef}
sitekey={CAPTCHA_SITE_KEY}
onChange={onCaptchaChange}
onExpired={onCaptchaExpired}
theme={theme}
size={size}
/>
{hasCaptchaError && (
<div className="text-red-500 text-sm mt-1">
{errorMessage}
</div>
)}
</div>
)}
<WrappedComponent
{...props}
submitWithCaptcha={submitWithCaptcha}
hasCaptchaError={hasCaptchaError}
isVerifyingCaptcha={isVerifyingCaptcha}
resetCaptcha={resetCaptcha}
/>
</>
);
};
// Display name for debugging
const wrappedComponentName =
WrappedComponent.displayName ||
WrappedComponent.name ||
'Component';
WithCaptchaComponent.displayName = `WithCaptcha(${wrappedComponentName})`;
return WithCaptchaComponent;
};
export default withCaptcha;
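
A minimal consumer of the HOC, mirroring how Login, ResetPasswordRequest, and CreatePassword wire it up above; the form and the sendFeedback call are placeholders:

import React, { useState } from 'react';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

declare function sendFeedback(data: Record<string, any>): Promise<void>; // hypothetical API

function FeedbackForm({ submitWithCaptcha, isVerifyingCaptcha, resetCaptcha }: WithCaptchaProps) {
  const [message, setMessage] = useState('');

  const onSubmit = (e: React.FormEvent) => {
    e.preventDefault();
    // Resolves with the payload plus 'g-recaptcha-response'
    // (or the payload untouched when CAPTCHA is disabled).
    submitWithCaptcha({ message })
      .then((data) => sendFeedback(data))
      .catch(() => resetCaptcha()); // let the user retry
  };

  return (
    <form onSubmit={onSubmit}>
      <textarea value={message} onChange={(e) => setMessage(e.target.value)} />
      <button disabled={isVerifyingCaptcha}>{isVerifyingCaptcha ? 'Verifying…' : 'Send'}</button>
    </form>
  );
}

export default withCaptcha(FeedbackForm);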


@ -10,7 +10,15 @@ metadata:
{{- include "assist.labels" . | nindent 4 }}
annotations:
nginx.ingress.kubernetes.io/rewrite-target: /$1
nginx.ingress.kubernetes.io/upstream-hash-by: $http_x_forwarded_for
nginx.ingress.kubernetes.io/configuration-snippet: |
# Extract sessionID from peerId using regex
if ($arg_peerId ~ ".*-(?<extracted_sid>[^-]+)-.*") {
set $session_id $extracted_sid;
}
add_header X-Debug-Session-ID $session_id;
add_header X-Debug-Session-Type "wss";
nginx.ingress.kubernetes.io/upstream-hash-by: $session_id
{{- with .Values.ingress.annotations }}
{{- toYaml . | nindent 4 }}
{{- end }}
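
The snippet makes nginx hash upstream selection on a session ID recovered from the peerId query parameter, so every connection belonging to one session lands on the same assist pod. The same extraction in TypeScript, for clarity (only the regex comes from this diff; the example peerId layout is an assumption):

// Mirror of the nginx regex ".*-(?<extracted_sid>[^-]+)-.*".
// The greedy leading .* means this captures the segment between the
// last two dashes of peerId.
function extractSessionId(peerId: string): string | null {
  const match = peerId.match(/.*-([^-]+)-.*/);
  return match ? match[1] : null;
}

// extractSessionId('1-12345-tab0') === '12345'  (layout assumed)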


@ -18,4 +18,4 @@ version: 0.1.10
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.22.0"
AppVersion: "v1.22.1"


@ -0,0 +1,24 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0';
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name);


@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
@ -515,6 +515,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name);
-- The full list of event-properties (used to tell which property belongs to which event)
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name);
-- The full list of properties (events and users)
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
@ -532,3 +543,15 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_properties
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
project_id UInt16,
user_id UInt32,
session_id UInt64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;


@ -0,0 +1,30 @@
\set previous_version 'v1.22.0'
\set next_version 'v1.23.0'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

View file

@@ -1,4 +1,4 @@
\set or_version 'v1.22.0'
\set or_version 'v1.23.0'
SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1

View file

@@ -0,0 +1,6 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';
DROP TABLE IF EXISTS experimental.user_viewed_sessions;
DROP TABLE IF EXISTS product_analytics.event_properties;

View file

@@ -0,0 +1,27 @@
\set previous_version 'v1.23.0'
\set next_version 'v1.22.0'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

View file

@@ -42,7 +42,7 @@ up to date with every new library you use.
| elasticsearch-py | Apache2 | Python |
| jira | BSD2 | Python |
| redis-py | MIT | Python |
| clickhouse-driver | MIT | Python |
| clickhouse-connect | Apache2 | Python |
| python3-saml | MIT | Python |
| kubernetes | Apache2 | Python |
| chalice | Apache2 | Python |

View file

@@ -1,3 +1,12 @@
## 11.0.2
- add sessionId header on socket.connect for sticky sessions
## 11.0.1
- fixed rare issue causing video feed to be black during calls
- new call widget url to prepare for multi-user calls
## 11.0.0
- migrate to native webrtc, remove peerjs

View file

@@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-assist",
"description": "Tracker plugin for screen assistance through the WebRTC",
"version": "11.0.0",
"version": "11.0.2-beta.1",
"keywords": [
"WebRTC",
"assistance",

View file

@@ -238,7 +238,10 @@ export default class Assist {
...this.app.getSessionInfo(),
}),
},
transports: ['websocket',],
extraHeaders: {
sessionId,
},
transports: ['polling', 'websocket',],
withCredentials: true,
reconnection: true,
reconnectionAttempts: 30,
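Two things change in the socket options: the session ID now travels as an extra header, and the transport list starts with HTTP long-polling instead of going straight to WebSocket. Since socket.io's `extraHeaders` are only attached to HTTP requests, the polling-first handshake is what guarantees the first routable request carries the ID the ingress hashes on; the connection then upgrades to WebSocket in place. A minimal sketch (the endpoint URL and header value are placeholders):

```ts
import { io } from 'socket.io-client';

const sessionId = 'sess_1234567890'; // hypothetical value resolved by the tracker

const socket = io('https://assist.example.com', {
  // extraHeaders only ride on HTTP requests, so starting with 'polling'
  // ensures the very first request carries them; the transport then
  // upgrades to 'websocket' on the same sticky upstream.
  extraHeaders: { sessionId },
  transports: ['polling', 'websocket'],
  withCredentials: true,
  reconnection: true,
  reconnectionAttempts: 30,
});

socket.on('connect', () => {
  console.log('connected via', socket.io.engine.transport.name); // 'polling' at first
});
```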
@@ -548,6 +551,16 @@ export default class Assist {
}
}
const renegotiateConnection = async ({ pc, from }: { pc: RTCPeerConnection, from: string }) => {
try {
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
this.emit('webrtc_call_offer', { from, offer });
} catch (error) {
app.debug.error("Error with renegotiation:", error);
}
};
const handleIncomingCallOffer = async (from: string, offer: RTCSessionDescriptionInit) => {
app.debug.log('handleIncomingCallOffer', from)
let confirmAnswer: Promise<boolean>
@@ -572,56 +585,59 @@ export default class Assist {
try {
// waiting for a decision on accepting the call
const agreed = await confirmAnswer
const agreed = await confirmAnswer;
// if rejected, then terminate the call
if (!agreed) {
initiateCallEnd()
this.options.onCallDeny?.()
return
}
if (!callUI) {
callUI = new CallWindow(app.debug.error, this.options.callUITemplate)
callUI.setVideoToggleCallback((args: { enabled: boolean }) =>
this.emit('videofeed', { streamId: from, enabled: args.enabled })
);
}
// show buttons in the call window
callUI.showControls(initiateCallEnd)
if (!annot) {
annot = new AnnotationCanvas()
annot.mount()
}
// callUI.setLocalStreams(Object.values(lStreams))
try {
// if there is no local stream for this peer in lStreams, we set one up
if (!lStreams[from]) {
app.debug.log('starting new stream for', from)
// request a local stream and store it in lStreams
lStreams[from] = await RequestLocalStream()
}
// we pass the local streams to the call UI
callUI.setLocalStreams(Object.values(lStreams))
} catch (e) {
app.debug.error('Error requesting local stream', e);
// if something didn't work out, we terminate the call
initiateCallEnd();
this.options.onCallDeny?.();
return;
}
// create a new RTCPeerConnection with ice server config
// create a new RTCPeerConnection with ice server config
const pc = new RTCPeerConnection({
iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
});
// get all local tracks and add them to RTCPeerConnection
lStreams[from].stream.getTracks().forEach(track => {
pc.addTrack(track, lStreams[from].stream);
});
if (!callUI) {
callUI = new CallWindow(app.debug.error, this.options.callUITemplate);
callUI.setVideoToggleCallback((args: { enabled: boolean }) => {
this.emit("videofeed", { streamId: from, enabled: args.enabled })
});
}
// show buttons in the call window
callUI.showControls(initiateCallEnd);
if (!annot) {
annot = new AnnotationCanvas();
annot.mount();
}
// callUI.setLocalStreams(Object.values(lStreams))
try {
// if there is no local stream for this peer in lStreams, we set one up
if (!lStreams[from]) {
app.debug.log("starting new stream for", from);
// request a local stream and store it in lStreams
lStreams[from] = await RequestLocalStream(pc, renegotiateConnection.bind(null, { pc, from }));
}
// we pass the local streams to the call UI
callUI.setLocalStreams(Object.values(lStreams));
} catch (e) {
app.debug.error("Error requesting local stream", e);
// if something didn't work out, we terminate the call
initiateCallEnd();
return;
}
// get all local tracks and add them to RTCPeerConnection
// When we receive local ice candidates, we emit them via socket
pc.onicecandidate = (event) => {
if (event.candidate) {
socket.emit('webrtc_call_ice_candidate', { from, candidate: event.candidate });
socket.emit("webrtc_call_ice_candidate", {
from,
candidate: event.candidate,
});
}
};
@@ -632,9 +648,9 @@ export default class Assist {
callUI.addRemoteStream(rStream, from);
const onInteraction = () => {
callUI?.playRemote();
document.removeEventListener('click', onInteraction);
document.removeEventListener("click", onInteraction);
};
document.addEventListener('click', onInteraction);
document.addEventListener("click", onInteraction);
}
};
@@ -648,7 +664,7 @@ export default class Assist {
// set answer as local description
await pc.setLocalDescription(answer);
// send the answer back over the socket
socket.emit('webrtc_call_answer', { from, answer });
socket.emit("webrtc_call_answer", { from, answer });
// If the state changes to an error, we terminate the call
// pc.onconnectionstatechange = () => {
@@ -658,27 +674,35 @@ export default class Assist {
// };
// Update track when local video changes
lStreams[from].onVideoTrack(vTrack => {
const sender = pc.getSenders().find(s => s.track?.kind === 'video');
lStreams[from].onVideoTrack((vTrack) => {
const sender = pc.getSenders().find((s) => s.track?.kind === "video");
if (!sender) {
app.debug.warn('No video sender found')
return
app.debug.warn("No video sender found");
return;
}
sender.replaceTrack(vTrack)
})
sender.replaceTrack(vTrack);
});
// if the user closed the tab or switched, then we end the call
document.addEventListener('visibilitychange', () => {
initiateCallEnd()
})
document.addEventListener("visibilitychange", () => {
initiateCallEnd();
});
// when everything is set, we change the state to true
this.setCallingState(CallingState.True)
if (!callEndCallback) { callEndCallback = this.options.onCallStart?.() }
const callingPeerIdsNow = Array.from(this.calls.keys())
this.setCallingState(CallingState.True);
if (!callEndCallback) {
callEndCallback = this.options.onCallStart?.();
}
const callingPeerIdsNow = Array.from(this.calls.keys());
// in session storage we write down everyone with whom the call is established
sessionStorage.setItem(this.options.session_calling_peer_key, JSON.stringify(callingPeerIdsNow))
this.emit('UPDATE_SESSION', { agentIds: callingPeerIdsNow, isCallActive: true })
sessionStorage.setItem(
this.options.session_calling_peer_key,
JSON.stringify(callingPeerIdsNow)
);
this.emit("UPDATE_SESSION", {
agentIds: callingPeerIdsNow,
isCallActive: true,
});
} catch (reason) {
app.debug.log(reason);
}
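Stripped of the UI concerns, `handleIncomingCallOffer` is the standard WebRTC answerer handshake, with `renegotiateConnection` issuing a fresh offer whenever a track is added mid-call. A condensed sketch of that flow (event names are taken from the diff; the socket and stream plumbing is simplified):

```ts
// A condensed sketch of the answerer flow above, under simplified plumbing.
async function answerCall(
  socket: { emit: (ev: string, payload: unknown) => void },
  from: string,
  offer: RTCSessionDescriptionInit,
  localStream: MediaStream,
): Promise<RTCPeerConnection> {
  const pc = new RTCPeerConnection({
    iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
  });

  // Local tracks must be attached before the answer is created so the
  // generated SDP advertises them.
  localStream.getTracks().forEach((track) => pc.addTrack(track, localStream));

  // Relay local ICE candidates to the agent as they are gathered.
  pc.onicecandidate = (event) => {
    if (event.candidate) {
      socket.emit('webrtc_call_ice_candidate', { from, candidate: event.candidate });
    }
  };

  // Standard answerer handshake: remote offer -> local answer.
  await pc.setRemoteDescription(offer);
  const answer = await pc.createAnswer();
  await pc.setLocalDescription(answer);
  socket.emit('webrtc_call_answer', { from, answer });

  // If a track is added later (e.g. video toggled on), a fresh offer is
  // required -- that is exactly what renegotiateConnection does in the diff.
  return pc;
}
```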

View file

@@ -48,7 +48,7 @@ export default class CallWindow {
}
// const baseHref = "https://static.openreplay.com/tracker-assist/test"
const baseHref = 'https://static.openreplay.com/tracker-assist/4.0.0'
const baseHref = 'https://static.openreplay.com/tracker-assist/widget'
// this.load = fetch(this.callUITemplate || baseHref + '/index2.html')
this.load = fetch(this.callUITemplate || baseHref + '/index.html')
.then((r) => r.text())
@@ -60,7 +60,7 @@ export default class CallWindow {
}, 0)
//iframe.style.height = doc.body.scrollHeight + 'px';
//iframe.style.width = doc.body.scrollWidth + 'px';
this.adjustIframeSize()
this.adjustIframeSize()
iframe.onload = null
}
// ?
@@ -152,15 +152,6 @@ export default class CallWindow {
if (this.checkRemoteVideoInterval) {
clearInterval(this.checkRemoteVideoInterval)
} // just in case
let enabled = false
this.checkRemoteVideoInterval = setInterval(() => {
const settings = this.remoteVideo?.getSettings()
const isDummyVideoTrack = !this.remoteVideo.enabled || (!!settings && (settings.width === 2 || settings.frameRate === 0))
const shouldBeEnabled = !isDummyVideoTrack
if (enabled !== shouldBeEnabled) {
this.toggleRemoteVideoUI((enabled = shouldBeEnabled))
}
}, 1000)
}
// Audio

View file

@@ -1,88 +1,86 @@
declare global {
interface HTMLCanvasElement {
captureStream(frameRate?: number): MediaStream;
}
}
function dummyTrack(): MediaStreamTrack {
const canvas = document.createElement('canvas')//, { width: 0, height: 0})
canvas.setAttribute('data-openreplay-hidden', '1')
canvas.width=canvas.height=2 // Doesn't work when 1 (?!)
const ctx = canvas.getContext('2d')
ctx?.fillRect(0, 0, canvas.width, canvas.height)
requestAnimationFrame(function draw(){
ctx?.fillRect(0,0, canvas.width, canvas.height)
requestAnimationFrame(draw)
})
// Also works. Probably it should be done once connected.
//setTimeout(() => { ctx?.fillRect(0,0, canvas.width, canvas.height) }, 4000)
return canvas.captureStream(60).getTracks()[0]
}
export default function RequestLocalStream(): Promise<LocalStream> {
return navigator.mediaDevices.getUserMedia({ audio:true, })
.then(aStream => {
const aTrack = aStream.getAudioTracks()[0]
if (!aTrack) { throw new Error('No audio tracks provided') }
return new _LocalStream(aTrack)
})
export default function RequestLocalStream(
pc: RTCPeerConnection,
toggleVideoCb?: () => void
): Promise<LocalStream> {
return navigator.mediaDevices
.getUserMedia({ audio: true, video: false })
.then((stream) => {
const aTrack = stream.getAudioTracks()[0];
if (!aTrack) {
throw new Error("No audio tracks provided");
}
stream.getTracks().forEach((track) => {
pc.addTrack(track, stream);
});
return new _LocalStream(stream, pc, toggleVideoCb);
});
}
class _LocalStream {
private mediaRequested = false
readonly stream: MediaStream
private readonly vdTrack: MediaStreamTrack
constructor(aTrack: MediaStreamTrack) {
this.vdTrack = dummyTrack()
this.stream = new MediaStream([ aTrack, this.vdTrack, ])
private mediaRequested = false;
readonly stream: MediaStream;
readonly vTrack: MediaStreamTrack;
readonly pc: RTCPeerConnection;
readonly toggleVideoCb?: () => void;
constructor(stream: MediaStream, pc: RTCPeerConnection, toggleVideoCb?: () => void) {
this.stream = stream;
this.pc = pc;
this.toggleVideoCb = toggleVideoCb;
}
toggleVideo(): Promise<boolean> {
const videoTracks = this.stream.getVideoTracks();
if (!this.mediaRequested) {
return navigator.mediaDevices.getUserMedia({video:true,})
.then(vStream => {
const vTrack = vStream.getVideoTracks()[0]
if (!vTrack) {
throw new Error('No video track provided')
}
this.stream.addTrack(vTrack)
this.stream.removeTrack(this.vdTrack)
this.mediaRequested = true
if (this.onVideoTrackCb) {
this.onVideoTrackCb(vTrack)
}
return true
})
.catch(e => {
// TODO: log
console.error(e)
return false
})
return navigator.mediaDevices
.getUserMedia({ video: true })
.then((vStream) => {
const vTrack = vStream.getVideoTracks()[0];
if (!vTrack) {
throw new Error("No video track provided");
}
this.pc.addTrack(vTrack, this.stream);
this.stream.addTrack(vTrack);
if (this.toggleVideoCb) {
this.toggleVideoCb();
}
this.mediaRequested = true;
if (this.onVideoTrackCb) {
this.onVideoTrackCb(vTrack);
}
return true;
})
.catch((e) => {
// TODO: log
return false;
});
} else {
videoTracks.forEach((track) => {
track.enabled = !track.enabled;
});
}
let enabled = true
this.stream.getVideoTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return Promise.resolve(enabled)
return Promise.resolve(videoTracks[0].enabled);
}
toggleAudio(): boolean {
let enabled = true
this.stream.getAudioTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return enabled
let enabled = true;
this.stream.getAudioTracks().forEach((track) => {
track.enabled = enabled = enabled && !track.enabled;
});
return enabled;
}
private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null
private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null;
onVideoTrack(cb: (t: MediaStreamTrack) => void) {
this.onVideoTrackCb = cb
this.onVideoTrackCb = cb;
}
stop() {
this.stream.getTracks().forEach(t => t.stop())
this.stream.getTracks().forEach((t) => t.stop());
}
}
export type LocalStream = InstanceType<typeof _LocalStream>
export type LocalStream = InstanceType<typeof _LocalStream>;
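The old `RequestLocalStream` padded an audio-only stream with a 2×2 canvas "dummy" video track, which is also why `CallWindow` needed the interval-based dummy-track detection removed above. The new version takes the `RTCPeerConnection` directly: tracks are added to it as they appear, and the first `toggleVideo()` both attaches the camera track and fires the renegotiation callback so the peer learns about it. A usage sketch under those assumptions (the import path and `renegotiate` helper are hypothetical stand-ins for the real wiring):

```ts
import RequestLocalStream from './LocalStream'; // path assumed

async function startCall(pc: RTCPeerConnection, renegotiate: () => void) {
  // Audio-only at first; RequestLocalStream now adds the tracks to pc itself.
  const local = await RequestLocalStream(pc, renegotiate);

  // First toggle: requests the camera, adds the video track to pc and the
  // stream, then fires renegotiate() so the peer is offered the new track.
  await local.toggleVideo(); // resolves true

  // Later toggles only flip track.enabled -- no renegotiation needed.
  await local.toggleVideo(); // resolves false

  local.stop(); // stops every track when the call ends
}
```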

View file

@@ -1 +1 @@
export const pkgVersion = "11.0.0";
export const pkgVersion = "11.0.2-beta.1";

View file

@@ -1,3 +1,7 @@
## 16.0.2
- fix attributeSender key generation to prevent calling native methods on objects
## 16.0.1
- drop computing ts digits

View file

@@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "16.0.1",
"version": "16.0.2",
"keywords": [
"logging",
"replay"

View file

@@ -848,8 +848,7 @@ export default class App {
* */
private _nCommit(): void {
if (this.socketMode) {
this.messages.unshift(TabData(this.session.getTabId()))
this.messages.unshift(Timestamp(this.timestamp()))
this.messages.unshift(Timestamp(this.timestamp()), TabData(this.session.getTabId()))
this.commitCallbacks.forEach((cb) => cb(this.messages))
this.messages.length = 0
return
@@ -874,8 +873,7 @@
try {
requestIdleCb(() => {
this.messages.unshift(TabData(this.session.getTabId()))
this.messages.unshift(Timestamp(this.timestamp()))
this.messages.unshift(Timestamp(this.timestamp()), TabData(this.session.getTabId()))
this.worker?.postMessage(this.messages)
this.commitCallbacks.forEach((cb) => cb(this.messages))
this.messages.length = 0
@@ -900,10 +898,9 @@
private _cStartCommit(): void {
this.coldStartCommitN += 1
if (this.coldStartCommitN === 2) {
this.bufferedMessages1.push(Timestamp(this.timestamp()))
this.bufferedMessages1.push(TabData(this.session.getTabId()))
this.bufferedMessages2.push(Timestamp(this.timestamp()))
this.bufferedMessages2.push(TabData(this.session.getTabId()))
const payload = [Timestamp(this.timestamp()), TabData(this.session.getTabId())]
this.bufferedMessages1.push(...payload)
this.bufferedMessages2.push(...payload)
this.coldStartCommitN = 0
}
}
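The batching change is behavior-preserving: two single-element `unshift` calls insert in reverse, so `unshift(Timestamp(...))` after `unshift(TabData(...))` yielded `[Timestamp, TabData, ...]`, exactly what the combined `unshift(Timestamp(...), TabData(...))` produces in a single pass:

```ts
const a = ['msg1', 'msg2'];
a.unshift('TabData');   // ['TabData', 'msg1', 'msg2']
a.unshift('Timestamp'); // ['Timestamp', 'TabData', 'msg1', 'msg2']

const b = ['msg1', 'msg2'];
b.unshift('Timestamp', 'TabData'); // same ['Timestamp', 'TabData', 'msg1', 'msg2'],
                                   // but the array is reindexed once instead of twice
```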

View file

@@ -15,7 +15,9 @@ export class StringDictionary {
getKey = (str: string): [number, boolean] => {
let isNew = false
if (!this.backDict[str]) {
// avoiding potential native object properties
const safeKey = `__${str}`
if (!this.backDict[safeKey]) {
isNew = true
// shaving the first 2 digits off the timestamp (since they are irrelevant for the next millennia)
const shavedTs = Date.now() % 10 ** (13 - 2)
@@ -26,10 +28,10 @@
} else {
this.lastSuffix = 1
}
this.backDict[str] = id
this.backDict[safeKey] = id
this.lastTs = shavedTs
}
return [this.backDict[str], isNew]
return [this.backDict[safeKey], isNew]
}
}
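The `__` prefix exists because plain-object lookups fall through to `Object.prototype`: for inputs like `"toString"` or `"constructor"`, `this.backDict[str]` finds an inherited function, the `!this.backDict[str]` check wrongly reports the key as already known, and the function object itself comes back as the id. Prefixing every key moves it out of the inherited-property namespace:

```ts
const dict: Record<string, number> = {};

// Inherited properties make the bare lookup lie:
Boolean(dict['toString']);   // true  -- Object.prototype.toString leaks through
Boolean(dict['__toString']); // false -- prefixed keys never collide

// Alternatives with the same effect, had a larger refactor been on the table:
const noProto = Object.create(null) as Record<string, number>;
Boolean(noProto['toString']); // false -- no prototype chain at all
const map = new Map<string, number>();
map.has('toString');          // false -- Map keys are own entries only
```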