Compare commits

138 commits: main ... assist_fix

Commits (SHA1; author and date columns were empty in the source table):
c9585a0f4f, af7b46516f, 05e0306823, 77a8371543, 61ae3e7faa, e4406ad26b, 9776443504, 1946d0f285, a8971d842b, c003057cf0, 586472c7dd, ecb192f16e, 6dc585417f, 264444c92a, b2fcd7094b, f3b98dad8a, c27213c65d, f61c5e99b5, 6412f14b08, 0a620c6ba3, 685741f039, 4ee78e1a5c, 77735d9d72, e3065e0530, d9d4221ad3, 0bbde3e75a, 7dec8bb943, c6a5ed6c3b, 99d62fa549, c0bb05bc0f, 70258e5c1d, 6ec146b24b, 9f464e3b41, e95bdab478, 421b3d1dc5, 437a25fb97, cb55a17227, 9d160abda5, 3758cf6565, 9db5e2a8f7, e0dba41065, 8fbaf25799, 65072f607f, cb4bf932c4, 20b938365c, 8e68ebd52b, 293382ea85, ac35bf5179, eb610d1c21, ac0ccb2169, 20a57d7ca1, 856e716507, bb17f672fe, d087736df0, ce546bcfa3, 9f681aca45, 0500f30d14, ec2c42c688, 7f0bc100f5, 522a985ef3, 634d0e8a0f, 28b4fc7598, 0d4c256ca8, 35f63a8fb1, a4e96822ed, 96f984a76a, 5f15dfafe7, b9cca6b388, 712f07988e, 08bddb3165, 3efb879cdf, ccf44fda70, ce525a4ccf, c6299c4592, a371c79151, f59a8c24f4, 8be6f63711, 8ba35b1324, 28dea3b225, 666643a6ae, 4cf688f15c, 1e57c90449, c0678bab15, 187a69a61a, 2e96a072e9, 5a410e63b3, 300a857a5c, eba22e0efa, 664f6b9014, 5bbd7cff10, 6f172d4f01, 829e1c8bde, e7d309dadf, 4bac12308a, 2aba1d9a52, 1f4e32e4f2, 49f98967d6, 356fa02094, a8e47e59ad, c760d29fb4, d77a518cf0, e04c2aa251, e6eb41536d, 4b3ad60565, 90669b0604, f4bf1b8960, 70423c6d8e, ae313c17d4, 0e45fa53ad, fe20f83130, d04e6686ca, 6adb45e15f, a1337faeee, 7e065ab02f, 1e2dde09b4, 3cdfe76134, 39855651d5, dd469d2349, 3d448320bf, 7b0771a581, 988b396223, fa3b585785, 91e0ebeb56, 8e68eb9a20, 13bd3d9121, 048ae0913c, 73fff8b817, 605fa96a34, 2cb33d7894, 15d427418d, ed3e553726, 7eace68de6, 8009882cef, 7365d8639c, 4c967d4bc1, 3fdf799bd7, 9aca716e6b, cf9ecdc9a4
249 changed files with 9394 additions and 5155 deletions
@@ -47,6 +47,7 @@ runs:
          "JWT_SECRET:.global.jwtSecret"
          "JWT_SPOT_REFRESH_SECRET:.chalice.env.JWT_SPOT_REFRESH_SECRET"
          "JWT_SPOT_SECRET:.global.jwtSpotSecret"
          "JWT_SECRET:.global.tokenSecret"
          "LICENSE_KEY:.global.enterpriseEditionLicense"
          "MINIO_ACCESS_KEY:.global.s3.accessKey"
          "MINIO_SECRET_KEY:.global.s3.secretKey"
18  .github/workflows/tracker-tests.yaml (vendored)

@@ -22,22 +22,14 @@ jobs:
      - name: Cache tracker modules
        uses: actions/cache@v3
        with:
          path: tracker/tracker/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
          restore-keys: |
            test_tracker_build{{ runner.OS }}-build-
            test_tracker_build{{ runner.OS }}-
      - name: Cache tracker-assist modules
        uses: actions/cache@v3
        with:
          path: tracker/tracker-assist/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
          path: tracker/node_modules
          key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }}
          restore-keys: |
            test_tracker_build{{ runner.OS }}-build-
            test_tracker_build{{ runner.OS }}-
      - name: Setup Testing packages
        run: |
          cd tracker/tracker
          cd tracker
          bun install
      - name: Jest tests
        run: |

@@ -47,10 +39,6 @@ jobs:
        run: |
          cd tracker/tracker
          bun run build
      - name: (TA) Setup Testing packages
        run: |
          cd tracker/tracker-assist
          bun install
      - name: (TA) Jest tests
        run: |
          cd tracker/tracker-assist
1  .gitignore (vendored)

@@ -7,3 +7,4 @@ node_modules
**/*.envrc
.idea
*.mob*
install-state.gz
11  api/Pipfile

@@ -6,16 +6,15 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.21"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
@@ -16,7 +16,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client, ch_client
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")

@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)

app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/pa")
app.include_router(product_analytics.app, prefix="/pa")
app.include_router(product_analytics.app_apikey, prefix="/pa")
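The hunk above both fixes the `product_anaytics` → `product_analytics` module typo and mounts the analytics routers under a `/pa` prefix. A minimal standalone sketch of how FastAPI's `include_router` prefix behaves (the router and route names here are illustrative, not from this PR):

```python
from fastapi import APIRouter, FastAPI

app = FastAPI()
router = APIRouter()


@router.get("/events")  # illustrative route, not from this PR
def list_events():
    return {"list": []}


# Mounting with a prefix moves every route in the router under /pa,
# so the endpoint above is served at GET /pa/events.
app.include_router(router, prefix="/pa")
```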
@@ -241,3 +241,25 @@ def get_colname_by_key(project_id, key):
        return None

    return index_to_colname(meta_keys[key])


def get_for_filters(project_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT {",".join(column_names())}
                                FROM public.projects
                                WHERE project_id = %(project_id)s
                                  AND deleted_at ISNULL
                                LIMIT 1;""", {"project_id": project_id})
        cur.execute(query=query)
        metas = cur.fetchone()
        results = []
        if metas is not None:
            for i, k in enumerate(metas.keys()):
                if metas[k] is not None:
                    results.append({"id": f"meta_{i}",
                                    "name": k,
                                    "displayName": metas[k],
                                    "possibleTypes": ["String"],
                                    "autoCaptured": False,
                                    "icon": None})
        return {"total": len(results), "list": results}
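For orientation, a hedged sketch of the payload shape `get_for_filters` returns; the metadata column names and display names below are invented for illustration:

```python
# Hypothetical result for a project with two metadata columns configured.
{
    "total": 2,
    "list": [
        {"id": "meta_0", "name": "metadata_1", "displayName": "plan",
         "possibleTypes": ["String"], "autoCaptured": False, "icon": None},
        {"id": "meta_1", "name": "metadata_2", "displayName": "region",
         "possibleTypes": ["String"], "autoCaptured": False, "icon": None},
    ],
}
```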
@@ -6,7 +6,7 @@ from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh


def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
    ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
                   schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
                   schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,

@@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):


def __parse_events(f_events: List[dict]):
    return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
    return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events]


def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]):
    if f_events is None:
        return
    events = []
@@ -160,7 +160,7 @@ s.start_ts,
s.duration"""


def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
                start_time: int,
                end_time: int) -> str | None:
    full_args = {

@@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
                                          value=[schemas.PlatformType.DESKTOP],
                                          operator=schemas.SearchEventOperator.IS))
    if not location_condition:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))
        data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
                                                            value=[],
                                                            operator=schemas.SearchEventOperator.IS_ANY))
    if no_click:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))
        data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
                                                            value=[],
                                                            operator=schemas.SearchEventOperator.IS_ANY))

    data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
                                                          value=[0],
@@ -24,8 +24,9 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
        "main_events.`$event_name` = 'CLICK'",
        "isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
    ]

    if data.operator == schemas.SearchEventOperator.IS:
    if data.operator == schemas.SearchEventOperator.PATTERN:
        constraints.append("match(main_events.`$properties`.url_path.:String,%(url)s)")
    elif data.operator == schemas.SearchEventOperator.IS:
        constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
    else:
        constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")

@@ -179,7 +180,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
s.duration AS duration"""


def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
                start_time: int,
                end_time: int) -> str | None:
    full_args = {

@@ -262,13 +263,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
                                          value=[schemas.PlatformType.DESKTOP],
                                          operator=schemas.SearchEventOperator.IS))
    if not location_condition:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))
        data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
                                                            value=[],
                                                            operator=schemas.SearchEventOperator.IS_ANY))
    if no_click:
        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
                                                             value=[],
                                                             operator=schemas.SearchEventOperator.IS_ANY))
        data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
                                                            value=[],
                                                            operator=schemas.SearchEventOperator.IS_ANY))

    data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
                                                          value=[0],
@@ -241,7 +241,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
    :return:
    """

    stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
    stages: List[schemas.SessionSearchEventSchema] = filter_d.events
    filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters

    stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
@@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)

def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
                      metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
    stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
    stages: List[schemas.SessionSearchEventSchema] = filter_d.events
    filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
    platform = project.platform
    constraints = ["e.project_id = %(project_id)s",
@@ -85,6 +85,9 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
# compute avg_time_from_previous at the same level as sessions_count (this was removed in v1.22)
# if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
    if not data.hide_excess:
        data.hide_excess = True
        data.rows = 50
    sub_events = []
    start_points_conditions = []
    step_0_conditions = []
@@ -1,14 +0,0 @@
from chalicelib.utils.ch_client import ClickHouseClient


def search_events(project_id: int, data: dict):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT *
               FROM taha.events
               WHERE project_id=%(project_id)s
               ORDER BY created_at;""",
            params={"project_id": project_id})
        x = ch_client.execute(r)

        return x

0  api/chalicelib/core/product_analytics/__init__.py (new file)
139  api/chalicelib/core/product_analytics/events.py (new file)
@@ -0,0 +1,139 @@
import logging

import schemas
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.ch_client import ClickHouseClient
from chalicelib.utils.exp_ch_helper import get_sub_condition

logger = logging.getLogger(__name__)


def get_events(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT DISTINCT ON(event_name,auto_captured)
                      COUNT(1) OVER () AS total,
                      event_name AS name, display_name, description,
                      auto_captured
               FROM product_analytics.all_events
               WHERE project_id=%(project_id)s
               ORDER BY auto_captured,display_name
               LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
        rows = ch_client.execute(r)
        if len(rows) == 0:
            return {"total": 0, "list": []}
        total = rows[0]["total"]
        for i, row in enumerate(rows):
            row["id"] = f"event_{i}"
            row["icon"] = None
            row["possibleTypes"] = ["string"]
            row.pop("total")
        return {"total": total, "list": helper.list_to_camel_case(rows)}


def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
    with ClickHouseClient() as ch_client:
        full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                     "projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}

        constraints = ["project_id = %(projectId)s",
                       "created_at >= toDateTime(%(startDate)s/1000)",
                       "created_at <= toDateTime(%(endDate)s/1000)"]
        ev_constraints = []
        for i, f in enumerate(data.filters):
            if not f.is_event:
                f.value = helper.values_for_operator(value=f.value, op=f.operator)
                f_k = f"f_value{i}"
                full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
                is_any = sh.isAny_opreator(f.operator)
                is_undefined = sh.isUndefined_operator(f.operator)
                if f.is_predefined:
                    column = f.name
                else:
                    column = f"properties.{f.name}"

                if is_any:
                    condition = f"notEmpty({column})"
                elif is_undefined:
                    condition = f"empty({column})"
                else:
                    condition = sh.multi_conditions(
                        get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
                        values=f.value, value_key=f_k)
                constraints.append(condition)

            else:
                e_k = f"e_value{i}"
                full_args = {**full_args, e_k: f.name}
                condition = f"`$event_name` = %({e_k})s"
                sub_conditions = []
                for j, ef in enumerate(f.properties.filters):
                    p_k = f"e_{i}_p_{j}"
                    full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k)}
                    if ef.is_predefined:
                        sub_condition = get_sub_condition(col_name=ef.name, val_name=p_k, operator=ef.operator)
                    else:
                        sub_condition = get_sub_condition(col_name=f"properties.{ef.name}",
                                                          val_name=p_k, operator=ef.operator)
                    sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
                if len(sub_conditions) > 0:
                    condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"

                ev_constraints.append(condition)

        constraints.append("(" + " OR ".join(ev_constraints) + ")")
        query = ch_client.format(
            f"""SELECT COUNT(1) OVER () AS total,
                       event_id,
                       `$event_name`,
                       created_at,
                       `distinct_id`,
                       `$browser`,
                       `$import`,
                       `$os`,
                       `$country`,
                       `$state`,
                       `$city`,
                       `$screen_height`,
                       `$screen_width`,
                       `$source`,
                       `$user_id`,
                       `$device`
                FROM product_analytics.events
                WHERE {" AND ".join(constraints)}
                ORDER BY created_at
                LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters=full_args)
        rows = ch_client.execute(query)
        if len(rows) == 0:
            return {"total": 0, "rows": [], "src": 2}
        total = rows[0]["total"]
        for r in rows:
            r.pop("total")
        return {"total": total, "rows": rows, "src": 2}


def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total,
                      all_events.event_name AS name,
                      *
               FROM product_analytics.all_events
               WHERE project_id=%(project_id)s
               ORDER BY display_name
               LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
        rows = ch_client.execute(r)
        if len(rows) == 0:
            return {"total": 0, "list": []}
        total = rows[0]["total"]
        for i, row in enumerate(rows):
            row["id"] = f"event_{i}"
            row["icon"] = None
            row["possibleTypes"] = ["string"]
            row.pop("total")
        return {"total": total, "list": helper.list_to_camel_case(rows)}
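These handlers all use the same pagination idiom: the window function `COUNT(1) OVER ()` returns the unfiltered total alongside every row of the requested page, so one round trip yields both the page and the grand total. A minimal standalone sketch of the idiom (query text is illustrative):

```python
# Minimal sketch of the pagination idiom used above; names are illustrative.
query = """
    SELECT COUNT(1) OVER () AS total,   -- the same grand total on every row
           event_name
    FROM product_analytics.all_events
    WHERE project_id = %(project_id)s
    ORDER BY event_name
    LIMIT %(limit)s OFFSET %(offset)s;
"""
# After execution, each row carries `total`; the handlers above pop it off
# each row and return {"total": total, "list": rows} in a single round trip.
```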
83  api/chalicelib/core/product_analytics/properties.py (new file)

@@ -0,0 +1,83 @@
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.ch_client import ClickHouseClient
import schemas


def get_all_properties(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total,
                      property_name AS name, display_name,
                      array_agg(DISTINCT event_properties.value_type) AS possible_types
               FROM product_analytics.all_properties
                        LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
               WHERE all_properties.project_id=%(project_id)s
               GROUP BY property_name,display_name
               ORDER BY display_name
               LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters={"project_id": project_id,
                        "limit": page.limit,
                        "offset": (page.page - 1) * page.limit})
        properties = ch_client.execute(r)
        if len(properties) == 0:
            return {"total": 0, "list": []}
        total = properties[0]["total"]
        properties = helper.list_to_camel_case(properties)
        for i, p in enumerate(properties):
            p["id"] = f"prop_{i}"
            p["icon"] = None
            p["possibleTypes"] = exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])
            p.pop("total")
        return {"total": total, "list": properties}


def get_event_properties(project_id: int, event_name):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT all_properties.property_name,
                      all_properties.display_name
               FROM product_analytics.event_properties
                        INNER JOIN product_analytics.all_properties USING (property_name)
               WHERE event_properties.project_id=%(project_id)s
                 AND all_properties.project_id=%(project_id)s
                 AND event_properties.event_name=%(event_name)s
               ORDER BY created_at;""",
            parameters={"project_id": project_id, "event_name": event_name})
        properties = ch_client.execute(r)

        return helper.list_to_camel_case(properties)


def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
    with ClickHouseClient() as ch_client:
        r = ch_client.format(
            """SELECT COUNT(1) OVER () AS total,
                      all_properties.property_name AS name,
                      all_properties.*,
                      possible_types.values AS possible_types,
                      possible_values.values AS sample_values
               FROM product_analytics.all_properties
                        LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS values
                                   FROM product_analytics.event_properties
                                   WHERE project_id=%(project_id)s
                                   GROUP BY 1, 2) AS possible_types
                                  USING (project_id, property_name)
                        LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS values
                                   FROM product_analytics.property_values_samples
                                   WHERE project_id=%(project_id)s
                                   GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
               WHERE project_id=%(project_id)s
               ORDER BY display_name
               LIMIT %(limit)s OFFSET %(offset)s;""",
            parameters={"project_id": project_id,
                        "limit": page.limit,
                        "offset": (page.page - 1) * page.limit})
        properties = ch_client.execute(r)
        if len(properties) == 0:
            return {"total": 0, "list": []}
        total = properties[0]["total"]
        for i, p in enumerate(properties):
            p["id"] = f"prop_{i}"
            p["icon"] = None
            p.pop("total")
        return {"total": total, "list": helper.list_to_camel_case(properties)}
@@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search_pg
from . import sessions_search_pg as sessions_search_legacy

if config("EXP_METRICS", cast=bool, default=False):
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental sessions search")
    from . import sessions_ch as sessions
    from . import sessions_search_ch as sessions_search
else:
    from . import sessions_pg as sessions
    from . import sessions_search_pg as sessions_search

# if config("EXP_METRICS", cast=bool, default=False):
#     from . import sessions_ch as sessions
# else:
#     from . import sessions_pg as sessions
@@ -6,6 +6,7 @@ from chalicelib.core import events, metadata
from . import performance_event, sessions_legacy
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.exp_ch_helper import get_sub_condition

logger = logging.getLogger(__name__)
@@ -48,8 +49,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
    query = f"""SELECT gs.generate_series AS timestamp,
                       COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
                FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
                         LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
                                                   s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
                         LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id,
                                                   isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id),
                                                   s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
                                           s.datetime AS datetime
                                    {query_part}) AS processed_sessions ON(TRUE)
@@ -148,7 +149,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    for e in data.events:
        if e.type == schemas.EventType.LOCATION:
            if e.operator not in extra_conditions:
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                    "type": e.type,
                    "isEvent": True,
                    "value": [],

@@ -173,7 +174,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    for e in data.events:
        if e.type == schemas.EventType.REQUEST_DETAILS:
            if e.operator not in extra_conditions:
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                    "type": e.type,
                    "isEvent": True,
                    "value": [],
@@ -253,7 +254,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    FROM (SELECT s.user_id AS user_id {extra_col}
          {query_part}
          WHERE isNotNull(user_id)
            AND user_id != '') AS filtred_sessions
            AND notEmpty(user_id)) AS filtred_sessions
    {extra_where}
    GROUP BY {main_col}
    ORDER BY total DESC
@@ -277,7 +278,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    return sessions


def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
    return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                            schemas.EventType.GRAPHQL] \
                or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,

@@ -330,7 +331,11 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
        extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
        condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
    else:
        condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
        # condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
        condition = get_sub_condition(
            col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
            val_name=value_key, operator=op
        )

    conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
@@ -660,7 +665,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
    event.value = helper.values_for_operator(value=event.value, op=event.operator)
    full_args = {**full_args,
                 **sh.multi_values(event.value, value_key=e_k),
                 **sh.multi_values(event.source, value_key=s_k)}
                 **sh.multi_values(event.source, value_key=s_k),
                 e_k: event.value[0] if len(event.value) > 0 else event.value}

    if event_type == events.EventType.CLICK.ui_type:
        event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@@ -671,24 +677,44 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
    events_conditions.append({"type": event_where[-1]})
    if not is_any:
        if schemas.ClickEventExtraOperator.has_value(event.operator):
            event_where.append(json_condition(
                "main",
                "$properties",
                "selector", op, event.value, e_k)
            )
            # event_where.append(json_condition(
            #     "main",
            #     "$properties",
            #     "selector", op, event.value, e_k)
            # )
            event_where.append(
                sh.multi_conditions(
                    get_sub_condition(col_name=f"main.`$properties`.selector",
                                      val_name=e_k, operator=event.operator),
                    event.value, value_key=e_k)
            )
            events_conditions[-1]["condition"] = event_where[-1]
        else:
            if is_not:
                event_where.append(json_condition(
                    "sub", "$properties", _column, op, event.value, e_k
                ))
                # event_where.append(json_condition(
                #     "sub", "$properties", _column, op, event.value, e_k
                # ))
                event_where.append(
                    sh.multi_conditions(
                        get_sub_condition(col_name=f"sub.`$properties`.{_column}",
                                          val_name=e_k, operator=event.operator),
                        event.value, value_key=e_k)
                )
                events_conditions_not.append(
                    {
                        "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
                        "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
                    }
                )
                events_conditions_not[-1]["condition"] = event_where[-1]
            else:
                # event_where.append(
                #     json_condition("main", "$properties", _column, op, event.value, e_k)
                # )
                event_where.append(
                    json_condition("main", "$properties", _column, op, event.value, e_k)
                    sh.multi_conditions(
                        get_sub_condition(col_name=f"main.`$properties`.{_column}",
                                          val_name=e_k, operator=event.operator),
                        event.value, value_key=e_k)
                )
                events_conditions[-1]["condition"] = event_where[-1]
    else:
@@ -870,12 +896,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
    events_conditions[-1]["condition"] = []
    if not is_any and event.value not in [None, "*", ""]:
        event_where.append(
            sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
                                event.value, value_key=e_k))
            sh.multi_conditions(
                f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
                event.value, value_key=e_k))
        events_conditions[-1]["condition"].append(event_where[-1])
        events_extra_join += f" AND {event_where[-1]}"
    if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
        event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
        event_where.append(
            sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
                                value_key=s_k))
        events_conditions[-1]["condition"].append(event_where[-1])
        events_extra_join += f" AND {event_where[-1]}"
@@ -1191,8 +1220,35 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
        else:
            logging.warning(f"undefined GRAPHQL filter: {f.type}")
        events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
    elif event_type == schemas.EventType.EVENT:
        event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
        _column = events.EventType.CLICK.column
        event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
        events_conditions.append({"type": event_where[-1], "condition": ""})

    else:
        continue
    if event.properties is not None and len(event.properties.filters) > 0:
        sub_conditions = []
        for l, property in enumerate(event.properties.filters):
            a_k = f"{e_k}_att_{l}"
            full_args = {**full_args,
                         **sh.multi_values(property.value, value_key=a_k)}

            if property.is_predefined:
                condition = get_sub_condition(col_name=f"main.{property.name}",
                                              val_name=a_k, operator=property.operator)
            else:
                condition = get_sub_condition(col_name=f"main.properties.{property.name}",
                                              val_name=a_k, operator=property.operator)
            event_where.append(
                sh.multi_conditions(condition, property.value, value_key=a_k)
            )
            sub_conditions.append(event_where[-1])
        if len(sub_conditions) > 0:
            sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
            events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
            events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
    if event_index == 0 or or_events:
        event_where += ss_constraints
    if is_not:
@@ -1,6 +1,5 @@
import ast
import logging
from typing import List, Union

import schemas
from chalicelib.core import events, metadata, projects

@@ -219,7 +218,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
    }


def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
    return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                            schemas.EventType.GRAPHQL] \
                or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@@ -143,7 +143,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    for e in data.events:
        if e.type == schemas.EventType.LOCATION:
            if e.operator not in extra_conditions:
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                    "type": e.type,
                    "isEvent": True,
                    "value": [],

@@ -160,7 +160,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    for e in data.events:
        if e.type == schemas.EventType.REQUEST_DETAILS:
            if e.operator not in extra_conditions:
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
                extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
                    "type": e.type,
                    "isEvent": True,
                    "value": [],

@@ -273,7 +273,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
    return sessions


def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
    return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
                                                                            schemas.EventType.GRAPHQL] \
                or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@@ -141,7 +141,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
            ) AS users_sessions;""",
            full_args)
    elif ids_only:
        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
        main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
                                          {query_part}
                                          ORDER BY s.session_id desc
                                          LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",

@@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                          ORDER BY sort_key {data.order}
                                          LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                                parameters=full_args)
    logging.debug("--------------------")
    logging.debug(main_query)
    logging.debug("--------------------")

    try:
        logging.debug("--------------------")
        sessions_list = cur.execute(main_query)
        logging.debug("--------------------")
    except Exception as err:
        logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
        logging.warning(main_query)

@@ -262,7 +262,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
                       FROM public.user_favorite_sessions
                       WHERE user_favorite_sessions.user_id = %(userId)s
                      ) AS favorite_sessions USING (session_id)
                WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
                WHERE s.project_id = %(id)s AND isNotNull(s.duration) AND s.{col_name} = %(value)s
                ) AS full_sessions
            ORDER BY favorite DESC, issue_score DESC
            LIMIT 10
@@ -122,7 +122,10 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
    sort = 'session_id'
    if data.sort is not None and data.sort != "session_id":
        # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
        sort = helper.key_to_snake_case(data.sort)
        if data.sort == 'datetime':
            sort = 'start_ts'
        else:
            sort = helper.key_to_snake_case(data.sort)

    meta_keys = metadata.get(project_id=project.project_id)
    main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
@@ -11,9 +11,3 @@ if smtp.has_smtp():
    logger.info("valid SMTP configuration found")
else:
    logger.info("no SMTP configuration found or SMTP validation failed")

if config("EXP_CH_DRIVER", cast=bool, default=True):
    logging.info(">>> Using new CH driver")
    from . import ch_client_exp as ch_client
else:
    from . import ch_client
@@ -1,73 +1,185 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty

import clickhouse_driver
import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config

logger = logging.getLogger(__name__)

_CH_CONFIG = {"host": config("ch_host"),
              "user": config("ch_user", default="default"),
              "password": config("ch_password", default=""),
              "port": config("ch_port_http", cast=int),
              "client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)

settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

if config('ch_receive_timeout', cast=int, default=-1) > 0:
    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
    extra_args["compression"] = "lz4"


def transform_result(self, original_function):
    @wraps(original_function)
    def wrapper(*args, **kwargs):
        if kwargs.get("parameters"):
            if config("LOCAL_DEV", cast=bool, default=False):
                logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
            else:
                logger.debug(
                    str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
        elif len(args) > 0:
            if config("LOCAL_DEV", cast=bool, default=False):
                logger.debug(args[0])
            else:
                logger.debug(str.encode(args[0]))
        result = original_function(*args, **kwargs)
        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
            column_names = result.column_names
            result = result.result_rows
            result = [dict(zip(column_names, row)) for row in result]

        return result

    return wrapper


class ClickHouseConnectionPool:
    def __init__(self, min_size, max_size):
        self.min_size = min_size
        self.max_size = max_size
        self.pool = Queue()
        self.lock = threading.Lock()
        self.total_connections = 0

        # Initialize the pool with min_size connections
        for _ in range(self.min_size):
            client = clickhouse_connect.get_client(**CH_CONFIG,
                                                   database=config("ch_database", default="default"),
                                                   settings=settings,
                                                   **extra_args)
            self.pool.put(client)
            self.total_connections += 1

    def get_connection(self):
        try:
            # Try to get a connection without blocking
            client = self.pool.get_nowait()
            return client
        except Empty:
            with self.lock:
                if self.total_connections < self.max_size:
                    client = clickhouse_connect.get_client(**CH_CONFIG,
                                                           database=config("ch_database", default="default"),
                                                           settings=settings,
                                                           **extra_args)
                    self.total_connections += 1
                    return client
            # If max_size reached, wait until a connection is available
            client = self.pool.get()
            return client

    def release_connection(self, client):
        self.pool.put(client)

    def close_all(self):
        with self.lock:
            while not self.pool.empty():
                client = self.pool.get()
                client.close()
            self.total_connections = 0


CH_pool: ClickHouseConnectionPool = None

RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0


def make_pool():
    if not config('CH_POOL', cast=bool, default=True):
        return
    global CH_pool
    global RETRY
    if CH_pool is not None:
        try:
            CH_pool.close_all()
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
    try:
        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                           max_size=config("CH_MAXCONN", cast=int, default=8))
        if CH_pool is not None:
            logger.info("Connection pool created successfully for CH")
    except ConnectionError as error:
        logger.error("Error while connecting to CH", exc_info=error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


class ClickHouseClient:
    __client = None

    def __init__(self, database=None):
        extra_args = {}
        if config("CH_COMPRESSION", cast=bool, default=True):
            extra_args["compression"] = "lz4"
        self.__client = clickhouse_driver.Client(host=config("ch_host"),
                                                 database=database if database else config("ch_database",
                                                                                           default="default"),
                                                 user=config("ch_user", default="default"),
                                                 password=config("ch_password", default=""),
                                                 port=config("ch_port", cast=int),
                                                 settings=settings,
                                                 **extra_args) \
            if self.__client is None else self.__client
        if self.__client is None:
            if database is not None or not config('CH_POOL', cast=bool, default=True):
                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
                                                              database=database if database else config("ch_database",
                                                                                                        default="default"),
                                                              settings=settings,
                                                              **extra_args)

            else:
                self.__client = CH_pool.get_connection()

        self.__client.execute = transform_result(self, self.__client.query)
        self.__client.format = self.format

    def __enter__(self):
        return self

    def execute(self, query, parameters=None, **args):
        try:
            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
            keys = tuple(x for x, y in results[1])
            return [dict(zip(keys, i)) for i in results[0]]
        except Exception as err:
            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
            logger.error("--------- CH QUERY EXCEPTION -----------")
            logger.error(self.format(query=query, parameters=parameters)
                         .replace('\n', '\\n')
                         .replace('    ', ' ')
                         .replace('  ', ' '))
            logger.error("--------------------")
            raise err

    def insert(self, query, params=None, **args):
        return self.__client.execute(query=query, params=params, **args)

    def client(self):
        return self.__client

    def format(self, query, parameters):
        if parameters is None:
            return query
        return self.__client.substitute_params(query, parameters, self.__client.connection.context)
    def format(self, query, parameters=None):
        if parameters:
            ctx = QueryContext(query=query, parameters=parameters)
            return ctx.final_query
        return query

    def __exit__(self, *args):
        pass
        if config('CH_POOL', cast=bool, default=True):
            CH_pool.release_connection(self.__client)
        else:
            self.__client.close()


async def init():
    logger.info(f">CH_POOL:not defined")
    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
    if config('CH_POOL', cast=bool, default=True):
        make_pool()


async def terminate():
    pass
    global CH_pool
    if CH_pool is not None:
        try:
            CH_pool.close_all()
            logger.info("Closed all connexions to CH")
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
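A hedged usage sketch of the rewritten client: this assumes `init()` has been awaited at startup so the pool exists, and that the usual `ch_*` settings are present in the environment; the query is illustrative.

```python
# Hypothetical call site for the pooled clickhouse-connect client above.
from chalicelib.utils.ch_client import ClickHouseClient

with ClickHouseClient() as ch:          # borrows a connection from CH_pool
    rows = ch.execute("SELECT 1 AS x")  # transform_result maps QueryResult
    # rows is a list of dicts, e.g. [{"x": 1}]
# __exit__ returns the connection to the pool instead of closing it,
# which is the main behavioral difference from the pre-pool client.
```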
@@ -1,177 +0,0 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty

import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config

logger = logging.getLogger(__name__)

_CH_CONFIG = {"host": config("ch_host"),
              "user": config("ch_user", default="default"),
              "password": config("ch_password", default=""),
              "port": config("ch_port_http", cast=int),
              "client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)

settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}

if config('ch_receive_timeout', cast=int, default=-1) > 0:
    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}

extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
    extra_args["compression"] = "lz4"


def transform_result(self, original_function):
    @wraps(original_function)
    def wrapper(*args, **kwargs):
        logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
        result = original_function(*args, **kwargs)
        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
            column_names = result.column_names
            result = result.result_rows
            result = [dict(zip(column_names, row)) for row in result]

        return result

    return wrapper


class ClickHouseConnectionPool:
    def __init__(self, min_size, max_size):
        self.min_size = min_size
        self.max_size = max_size
        self.pool = Queue()
        self.lock = threading.Lock()
        self.total_connections = 0

        # Initialize the pool with min_size connections
        for _ in range(self.min_size):
            client = clickhouse_connect.get_client(**CH_CONFIG,
                                                   database=config("ch_database", default="default"),
                                                   settings=settings,
                                                   **extra_args)
            self.pool.put(client)
            self.total_connections += 1

    def get_connection(self):
        try:
            # Try to get a connection without blocking
            client = self.pool.get_nowait()
            return client
        except Empty:
            with self.lock:
                if self.total_connections < self.max_size:
                    client = clickhouse_connect.get_client(**CH_CONFIG,
                                                           database=config("ch_database", default="default"),
                                                           settings=settings,
                                                           **extra_args)
                    self.total_connections += 1
                    return client
            # If max_size reached, wait until a connection is available
            client = self.pool.get()
            return client

    def release_connection(self, client):
        self.pool.put(client)

    def close_all(self):
        with self.lock:
            while not self.pool.empty():
                client = self.pool.get()
                client.close()
            self.total_connections = 0


CH_pool: ClickHouseConnectionPool = None

RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0


def make_pool():
    if not config('CH_POOL', cast=bool, default=True):
        return
    global CH_pool
    global RETRY
    if CH_pool is not None:
        try:
            CH_pool.close_all()
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
    try:
        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                           max_size=config("CH_MAXCONN", cast=int, default=8))
        if CH_pool is not None:
            logger.info("Connection pool created successfully for CH")
    except ConnectionError as error:
        logger.error("Error while connecting to CH", exc_info=error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


class ClickHouseClient:
    __client = None

    def __init__(self, database=None):
        if self.__client is None:
            if database is not None or not config('CH_POOL', cast=bool, default=True):
                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
                                                              database=database if database else config("ch_database",
                                                                                                        default="default"),
                                                              settings=settings,
                                                              **extra_args)

            else:
                self.__client = CH_pool.get_connection()

        self.__client.execute = transform_result(self, self.__client.query)
        self.__client.format = self.format

    def __enter__(self):
        return self.__client

    def format(self, query, *, parameters=None):
        if parameters is None:
            return query
        return query % {
            key: f"'{value}'" if isinstance(value, str) else value
            for key, value in parameters.items()
        }

    def __exit__(self, *args):
        if config('CH_POOL', cast=bool, default=True):
            CH_pool.release_connection(self.__client)
        else:
            self.__client.close()


async def init():
    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
    if config('CH_POOL', cast=bool, default=True):
        make_pool()


async def terminate():
    global CH_pool
    if CH_pool is not None:
        try:
            CH_pool.close_all()
            logger.info("Closed all connexions to CH")
        except Exception as error:
            logger.error("Error while closing all connexions to CH", exc_info=error)
@@ -1,7 +1,10 @@
import logging
import re
from typing import Union

import schemas
import logging
from chalicelib.utils import sql_helper as sh
from schemas import SearchEventOperator

logger = logging.getLogger(__name__)
@ -66,3 +69,94 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
|
|||
if event_type not in defs:
|
||||
raise Exception(f"unsupported EventType:{event_type}")
|
||||
return defs.get(event_type)
|
||||
|
||||
|
||||
# AI generated
|
||||
def simplify_clickhouse_type(ch_type: str) -> str:
|
||||
"""
|
||||
Simplify a ClickHouse data type name to a broader category like:
|
||||
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
|
||||
"""
|
||||
|
||||
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
|
||||
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
|
||||
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
|
||||
while True:
|
||||
match = pattern_wrappers.match(ch_type)
|
||||
if match:
|
||||
ch_type = match.group(2)
|
||||
else:
|
||||
break
|
||||
|
||||
# 2) Normalize (lowercase) for easier checks
|
||||
normalized_type = ch_type.lower()
|
||||
|
||||
# 3) Use pattern matching or direct checks for known categories
|
||||
# (You can adapt this as you see fit for your environment.)
|
||||
|
||||
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
|
||||
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
|
||||
return "int"
|
||||
|
||||
# Floats: Float32, Float64
|
||||
if re.match(r'^float(32|64)$', normalized_type):
|
||||
return "float"
|
||||
|
||||
# Decimal: Decimal(P, S)
|
||||
if normalized_type.startswith("decimal"):
|
||||
return "decimal"
|
||||
|
||||
# Date/DateTime
|
||||
if normalized_type.startswith("date"):
|
||||
return "datetime"
|
||||
if normalized_type.startswith("datetime"):
|
||||
return "datetime"
|
||||
|
||||
# Strings: String, FixedString(N)
|
||||
if normalized_type.startswith("string"):
|
||||
return "string"
|
||||
if normalized_type.startswith("fixedstring"):
|
||||
return "string"
|
||||
|
||||
# UUID
|
||||
if normalized_type.startswith("uuid"):
|
||||
return "uuid"
|
||||
|
||||
# Enums: Enum8(...) or Enum16(...)
|
||||
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
|
||||
return "enum"
|
||||
|
||||
# Arrays: Array(T)
|
||||
if normalized_type.startswith("array"):
|
||||
return "array"
|
||||
|
||||
# Tuples: Tuple(T1, T2, ...)
|
||||
if normalized_type.startswith("tuple"):
|
||||
return "tuple"
|
||||
|
||||
# Map(K, V)
|
||||
if normalized_type.startswith("map"):
|
||||
return "map"
|
||||
|
||||
# Nested(...)
|
||||
if normalized_type.startswith("nested"):
|
||||
return "nested"
|
||||
|
||||
# If we didn't match above, just return the original type in lowercase
|
||||
return normalized_type
|
||||
|
||||
|
||||
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
|
||||
"""
|
||||
Takes a list of ClickHouse types and returns a list of simplified types
|
||||
by calling `simplify_clickhouse_type` on each.
|
||||
"""
|
||||
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
|
||||
|
||||
|
||||
def get_sub_condition(col_name: str, val_name: str,
|
||||
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]):
|
||||
if operator == SearchEventOperator.PATTERN:
|
||||
return f"match({col_name}, %({val_name})s)"
|
||||
op = sh.get_sql_operator(operator)
|
||||
return f"{col_name} {op} %({val_name})s"
|
||||
|
|
|
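A minimal usage sketch for the helpers above; the import path is hypothetical, and the outputs follow from the branches shown:

# Sketch only: assumes the module above is importable under this (hypothetical) path.
from chalicelib.utils.ch_helper import simplify_clickhouse_type, simplify_clickhouse_types

print(simplify_clickhouse_type("LowCardinality(Nullable(Int32))"))  # -> "int" (wrappers stripped first)
print(simplify_clickhouse_type("DateTime64(3)"))                    # -> "datetime"
print(simplify_clickhouse_types(["UInt8", "Int64", "Float32"]))     # e.g. ["int", "float"]; set() deduplicates, order not guaranteed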
@@ -14,6 +14,9 @@ def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventEx
        schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
        schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
        schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
        # PATTERN is not a real SQL operator; it is mapped here only so conditions always resolve to a valid value
        schemas.SearchEventOperator.PATTERN: "regex",

        # Selector operators:
        schemas.ClickEventExtraOperator.IS: "=",
        schemas.ClickEventExtraOperator.IS_NOT: "!=",

@@ -72,4 +75,3 @@ def single_value(values):
        if isinstance(v, Enum):
            values[i] = v.value
    return values
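For illustration, this mapping is what get_sub_condition in the previous file consumes: PATTERN is rendered through ClickHouse's match(), everything else through the mapped SQL operator. A hedged sketch (column and placeholder names are illustrative):

print(get_sub_condition("url_path", "v_0", schemas.SearchEventOperator.PATTERN))
# match(url_path, %(v_0)s)
print(get_sub_condition("url_path", "v_0", schemas.SearchEventOperator.STARTS_WITH))
# url_path ILIKE %(v_0)s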
@@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_METRICS=true
EXP_SESSIONS_SEARCH=true
@@ -1,591 +0,0 @@
-- -- Original Q3
-- WITH ranked_events AS (SELECT *
--                        FROM ranked_events_1736344377403),
--      n1 AS (SELECT event_number_in_session,
--                    event_type,
--                    e_value,
--                    next_type,
--                    next_value,
--                    COUNT(1) AS sessions_count
--             FROM ranked_events
--             WHERE event_number_in_session = 1
--               AND isNotNull(next_value)
--             GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n2 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n1
--                        INNER JOIN ranked_events AS re
--                                   ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 2
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n3 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n2
--                        INNER JOIN ranked_events AS re
--                                   ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 3
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n4 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n3
--                        INNER JOIN ranked_events AS re
--                                   ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 4
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8),
--      n5 AS (SELECT *
--             FROM (SELECT re.event_number_in_session AS event_number_in_session,
--                          re.event_type AS event_type,
--                          re.e_value AS e_value,
--                          re.next_type AS next_type,
--                          re.next_value AS next_value,
--                          COUNT(1) AS sessions_count
--                   FROM n4
--                        INNER JOIN ranked_events AS re
--                                   ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
--                   WHERE re.event_number_in_session = 5
--                   GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
--                            re.next_value) AS sub_level
--             ORDER BY sessions_count DESC
--             LIMIT 8)
-- SELECT *
-- FROM (SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n1
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n2
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n3
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n4
--       UNION ALL
--       SELECT event_number_in_session,
--              event_type,
--              e_value,
--              next_type,
--              next_value,
--              sessions_count
--       FROM n5) AS chart_steps
-- ORDER BY event_number_in_session;

-- Q1
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
                        FROM experimental.events AS events
                        WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
                          AND events.project_id = toUInt16(65)
                          AND events.datetime >= toDateTime(1735599600000 / 1000)
                          AND events.datetime < toDateTime(1736290799999 / 1000)
                        GROUP BY 1),
      pre_ranked_events AS (SELECT *
                            FROM (SELECT session_id,
                                         event_type,
                                         datetime,
                                         url_path AS e_value,
                                         row_number() OVER (PARTITION BY session_id
                                             ORDER BY datetime,
                                                 message_id) AS event_number_in_session
                                  FROM experimental.events AS events
                                       INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
                                  WHERE events.project_id = toUInt16(65)
                                    AND events.datetime >= toDateTime(1735599600000 / 1000)
                                    AND events.datetime < toDateTime(1736290799999 / 1000)
                                    AND (events.event_type = 'LOCATION')
                                    AND events.datetime >= initial_event.start_event_timestamp
                                  ) AS full_ranked_events
                            WHERE event_number_in_session <= 5)
 SELECT *
 FROM pre_ranked_events);
;

SELECT *
FROM pre_ranked_events_1736344377403
WHERE event_number_in_session < 3;



-- ---------Q2-----------
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
DROP TABLE ranked_events_1736344377403;
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
(WITH pre_ranked_events AS (SELECT *
                            FROM pre_ranked_events_1736344377403),
      start_points AS (SELECT DISTINCT session_id
                       FROM pre_ranked_events
                       WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
                         AND pre_ranked_events.event_number_in_session = 1),
      ranked_events AS (SELECT pre_ranked_events.*,
                               leadInFrame(e_value)
                                   OVER (PARTITION BY session_id ORDER BY datetime
                                       ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
                               leadInFrame(toNullable(event_type))
                                   OVER (PARTITION BY session_id ORDER BY datetime
                                       ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
                        FROM start_points
                             INNER JOIN pre_ranked_events USING (session_id))
 SELECT *
 FROM ranked_events);


-- ranked events
SELECT event_number_in_session,
       event_type,
       e_value,
       next_type,
       next_value,
       COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
--   AND e_value='/en/deployment/deploy-docker/'
--   AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
--   AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
  AND isNotNull(next_value)
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY event_number_in_session, sessions_count DESC;



SELECT event_number_in_session,
       event_type,
       e_value,
       COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value
ORDER BY event_number_in_session, sessions_count DESC;

SELECT COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
  AND isNull(next_value)
;

-- ---------Q3 MORE -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     drop_n AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL AS next_value,
                       sessions_count
                FROM n1
                WHERE isNull(n1.next_type)
                UNION ALL
                -- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL AS next_value,
                       sessions_count
                FROM n2
                WHERE isNull(n2.next_type)),
     -- TODO: make this as top_steps, where every step will go to next as top/others
     top_n1 AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       next_type,
                       next_value,
                       sessions_count
                FROM n1
                WHERE isNotNull(next_type)
                ORDER BY sessions_count DESC
                LIMIT 3),
     top_n2 AS (-- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       next_type,
                       next_value,
                       sessions_count
                FROM n2
                WHERE (event_type, e_value) IN (SELECT event_type,
                                                       e_value
                                                FROM n2
                                                WHERE isNotNull(next_type)
                                                GROUP BY event_type, e_value
                                                ORDER BY SUM(sessions_count) DESC
                                                LIMIT 3)
                ORDER BY sessions_count DESC),
     top_n AS (SELECT *
               FROM top_n1
               UNION ALL
               SELECT *
               FROM top_n2),
     u_top_n AS (SELECT DISTINCT event_number_in_session,
                                 event_type,
                                 e_value
                 FROM top_n),
     others_n AS (
         -- STEP 1
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n1
         WHERE isNotNull(next_type)
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3
         UNION ALL
         -- STEP 2
         SELECT event_number_in_session,
                event_type,
                e_value,
                next_type,
                next_value,
                sessions_count
         FROM n2
         WHERE isNotNull(next_type)
         -- GROUP BY event_number_in_session, event_type, e_value
         ORDER BY sessions_count DESC
         LIMIT 1000000 OFFSET 3)
SELECT *
FROM (
     -- Top
     SELECT *
     FROM top_n
--      UNION ALL
--      -- Others
--      SELECT event_number_in_session,
--             event_type,
--             e_value,
--             'OTHER' AS next_type,
--             NULL AS next_value,
--             SUM(sessions_count)
--      FROM others_n
--      GROUP BY event_number_in_session, event_type, e_value
--      UNION ALL
--      -- Top go to Drop
--      SELECT drop_n.event_number_in_session,
--             drop_n.event_type,
--             drop_n.e_value,
--             drop_n.next_type,
--             drop_n.next_value,
--             drop_n.sessions_count
--      FROM drop_n
--           INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
--                                  AND drop_n.event_type = u_top_n.event_type
--                                  AND drop_n.e_value = u_top_n.e_value)
--      ORDER BY drop_n.event_number_in_session
--      -- -- UNION ALL
--      -- -- Top go to Others
--      SELECT top_n.event_number_in_session,
--             top_n.event_type,
--             top_n.e_value,
--             'OTHER' AS next_type,
--             NULL AS next_value,
--             SUM(top_n.sessions_count) AS sessions_count
--      FROM top_n
--           LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
--                                  AND top_n.next_type = others_n.event_type
--                                  AND top_n.next_value = others_n.e_value)
--      WHERE others_n.event_number_in_session IS NULL
--        AND top_n.next_type IS NOT NULL
--      GROUP BY event_number_in_session, event_type, e_value
--      UNION ALL
--      -- Others got to Top
--      SELECT others_n.event_number_in_session,
--             'OTHER' AS event_type,
--             NULL AS e_value,
--             others_n.s_next_type AS next_type,
--             others_n.s_next_value AS next_value,
--             SUM(sessions_count) AS sessions_count
--      FROM others_n
--           INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
--                                others_n.s_next_type = top_n.event_type AND
--                                others_n.s_next_value = top_n.event_type)
--      GROUP BY others_n.event_number_in_session, next_type, next_value
--      UNION ALL
--      -- TODO: find if this works or not
--      -- Others got to Others
--      SELECT others_n.event_number_in_session,
--             'OTHER' AS event_type,
--             NULL AS e_value,
--             'OTHERS' AS next_type,
--             NULL AS next_value,
--             SUM(sessions_count) AS sessions_count
--      FROM others_n
--           LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
--                                 AND others_n.s_next_type = u_top_n.event_type
--                                 AND others_n.s_next_value = u_top_n.e_value)
--      WHERE u_top_n.event_number_in_session IS NULL
--      GROUP BY others_n.event_number_in_session
     )
ORDER BY event_number_in_session;


-- ---------Q3 TOP ON VALUE ONLY -----------
WITH ranked_events AS (SELECT *
                       FROM ranked_events_1736344377403),
     n1 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 1
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n2 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 2
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),
     n3 AS (SELECT event_number_in_session,
                   event_type,
                   e_value,
                   next_type,
                   next_value,
                   COUNT(1) AS sessions_count
            FROM ranked_events
            WHERE event_number_in_session = 3
            GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
            ORDER BY sessions_count DESC),

     drop_n AS (-- STEP 1
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL AS next_value,
                       sessions_count
                FROM n1
                WHERE isNull(n1.next_type)
                UNION ALL
                -- STEP 2
                SELECT event_number_in_session,
                       event_type,
                       e_value,
                       'DROP' AS next_type,
                       NULL AS next_value,
                       sessions_count
                FROM n2
                WHERE isNull(n2.next_type)),
     top_n AS (SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n1
               GROUP BY event_number_in_session, event_type, e_value
               LIMIT 1
               UNION ALL
               -- STEP 2
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n2
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3
               UNION ALL
               -- STEP 3
               SELECT event_number_in_session,
                      event_type,
                      e_value,
                      SUM(sessions_count) AS sessions_count
               FROM n3
               GROUP BY event_number_in_session, event_type, e_value
               ORDER BY sessions_count DESC
               LIMIT 3),
     top_n_with_next AS (SELECT n1.*
                         FROM n1
                         UNION ALL
                         SELECT n2.*
                         FROM n2
                              INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
                                                   AND n2.event_type = top_n.event_type
                                                   AND n2.e_value = top_n.e_value)),
     others_n AS (
         -- STEP 2
         SELECT n2.*
         FROM n2
         WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 2)
         UNION ALL
         -- STEP 3
         SELECT n3.*
         FROM n3
         WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
               (SELECT event_number_in_session, event_type, e_value
                FROM top_n
                WHERE top_n.event_number_in_session = 3))
SELECT *
FROM (
--      SELECT sum(top_n_with_next.sessions_count)
--      FROM top_n_with_next
--      WHERE event_number_in_session = 1
--      -- AND isNotNull(next_value)
--        AND (next_type, next_value) IN
--            (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
--      -- SELECT * FROM others_n
--      -- SELECT * FROM n2
--      SELECT *
--      FROM top_n
--      );
     -- Top to Top: valid
     SELECT top_n_with_next.*
     FROM top_n_with_next
          INNER JOIN top_n
                     ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
                         AND top_n_with_next.next_type = top_n.event_type
                         AND top_n_with_next.next_value = top_n.e_value)
     UNION ALL
     -- Top to Others: valid
     SELECT top_n_with_next.event_number_in_session,
            top_n_with_next.event_type,
            top_n_with_next.e_value,
            'OTHER' AS next_type,
            NULL AS next_value,
            SUM(top_n_with_next.sessions_count) AS sessions_count
     FROM top_n_with_next
     WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
           (SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
     GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
     UNION ALL
     -- Top go to Drop: valid
     SELECT drop_n.event_number_in_session,
            drop_n.event_type,
            drop_n.e_value,
            drop_n.next_type,
            drop_n.next_value,
            drop_n.sessions_count
     FROM drop_n
          INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
                               AND drop_n.event_type = top_n.event_type
                               AND drop_n.e_value = top_n.e_value)
     ORDER BY drop_n.event_number_in_session
     UNION ALL
     -- Others got to Drop: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            'DROP' AS next_type,
            NULL AS next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
     GROUP BY others_n.event_number_in_session, next_type, next_value
     UNION ALL
     -- Others got to Top: valid
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            others_n.next_type,
            others_n.next_value,
            SUM(others_n.sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
           (SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
     GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
     UNION ALL
     -- Others got to Others
     SELECT others_n.event_number_in_session,
            'OTHER' AS event_type,
            NULL AS e_value,
            'OTHERS' AS next_type,
            NULL AS next_value,
            SUM(sessions_count) AS sessions_count
     FROM others_n
     WHERE isNotNull(others_n.next_type)
       AND others_n.event_number_in_session < 3
       AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
           (SELECT event_number_in_session, event_type, e_value FROM top_n)
     GROUP BY others_n.event_number_in_session)
ORDER BY event_number_in_session, sessions_count DESC;
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6
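Since clickhouse-driver[lz4] is dropped here in favor of clickhouse-connect, a minimal sketch of issuing a parameterized query through the new client; host and credentials are placeholders:

import clickhouse_connect

client = clickhouse_connect.get_client(host="localhost", username="default", password="")
result = client.query(
    "SELECT event_type, COUNT(1) AS sessions_count "
    "FROM experimental.events WHERE project_id = {pid:UInt16} GROUP BY event_type",
    parameters={"pid": 65},  # server-side binding via {name:Type} placeholders
)
for row in result.result_rows:
    print(row)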
@@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2

fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6
api/routers/subs/product_analytics.py (new file, 55 lines)

@@ -0,0 +1,55 @@
from typing import Annotated

from fastapi import Body, Depends, Query

import schemas
from chalicelib.core import metadata
from chalicelib.core.product_analytics import events, properties
from or_dependencies import OR_context
from routers.base import get_routers

public_app, app, app_apikey = get_routers()


@app.get('/{projectId}/filters', tags=["product_analytics"])
def get_all_filters(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                    context: schemas.CurrentContext = Depends(OR_context)):
    return {
        "data": {
            "events": events.get_events(project_id=projectId, page=filter_query),
            "filters": properties.get_all_properties(project_id=projectId, page=filter_query),
            "metadata": metadata.get_for_filters(project_id=projectId)
        }
    }


@app.get('/{projectId}/events/names', tags=["product_analytics"])
def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                   context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.get_events(project_id=projectId, page=filter_query)}


@app.get('/{projectId}/properties/search', tags=["product_analytics"])
def get_event_properties(projectId: int, event_name: str = None,
                         context: schemas.CurrentContext = Depends(OR_context)):
    if not event_name or len(event_name) == 0:
        return {"data": []}
    return {"data": properties.get_event_properties(project_id=projectId, event_name=event_name)}


@app.post('/{projectId}/events/search', tags=["product_analytics"])
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.search_events(project_id=projectId, data=data)}


@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                           context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}


@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                               context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}
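A hypothetical smoke test for the new router above; base URL, project id, and auth header are placeholders, and the expected keys mirror get_all_filters:

import requests

resp = requests.get(
    "http://localhost:8000/1/filters",            # projectId = 1, illustrative
    params={"page": 1, "limit": 10},              # bound into PaginatedSchema via Query()
    headers={"Authorization": "Bearer <token>"},  # auth scheme assumed
)
resp.raise_for_status()
print(sorted(resp.json()["data"].keys()))  # expect: ['events', 'filters', 'metadata']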
@@ -1,15 +0,0 @@
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers


public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
                  # data: schemas.CreateDashboardSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return product_anaytics2.search_events(project_id=projectId, data={})
@@ -1,10 +1,12 @@
from fastapi import Body, Depends
from typing import Annotated

from fastapi import Body, Depends, Query

import schemas
from chalicelib.core.usability_testing import service
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
from or_dependencies import OR_context
from routers.base import get_routers
from schemas import schemas

public_app, app, app_apikey = get_routers()
tags = ["usability-tests"]

@@ -77,9 +79,8 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate


@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
                       live: bool = False,
                       user_id: str = None):
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
                       live: bool = False, user_id: str = None):
    """
    Get sessions related to a specific UT test.


@@ -88,20 +89,21 @@ async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int =
    """

    if live:
        return service.ut_tests_sessions_live(projectId, test_id, page, limit)
        return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
    else:
        return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
        return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit, user_id, live)


@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
async def get_responses(projectId: int, test_id: int, task_id: int,
                        filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
    """
    Get responses related to a specific UT test.

    - **project_id**: The unique identifier of the project.
    - **test_id**: The unique identifier of the UT test.
    """
    return service.get_responses(test_id, task_id, page, limit, query)
    return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query)


@app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)
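The change above folds separate page/limit query parameters into one Pydantic model bound with Annotated[..., Query()]. A self-contained sketch of the pattern, with the model stood in locally (FastAPI has supported Pydantic models as query parameters since 0.115, consistent with the fastapi==0.115.12 bump elsewhere in this changeset):

from typing import Annotated

from fastapi import FastAPI, Query
from pydantic import BaseModel

app = FastAPI()


class PaginatedSchema(BaseModel):  # stand-in for schemas.PaginatedSchema; defaults assumed
    page: int = 1
    limit: int = 10


@app.get("/items")
def list_items(filter_query: Annotated[PaginatedSchema, Query()]):
    # FastAPI validates ?page=...&limit=... into a single model instance
    return {"page": filter_query.page, "limit": filter_query.limit}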
@@ -1,2 +1,4 @@
from .schemas import *
from .product_analytics import *
from . import overrides as _overrides
from .schemas import _PaginatedSchema as PaginatedSchema
api/schemas/product_analytics.py (new file, 22 lines)

@@ -0,0 +1,22 @@
from typing import Optional, List, Literal, Union, Annotated

from pydantic import Field

from .overrides import BaseModel
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
    _PaginatedSchema, PropertyFilterSchema


class EventSearchSchema(BaseModel):
    is_event: Literal[True] = True
    name: str = Field(...)
    properties: Optional[EventPropertiesSchema] = Field(default=None)


ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema],
                                          Field(discriminator='is_event')]


class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    filters: List[ProductAnalyticsGroupedFilter] = Field(...)
    sort: str = Field(default="startTs")
    order: SortOrderType = Field(default=SortOrderType.DESC)
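A sketch of how the discriminated union above dispatches on is_event: a plain dict payload validates into the right schema class. Field names inherited from _TimedSchema and the enum string values are assumptions:

payload = EventsSearchPayloadSchema(
    startTimestamp=1735599600000,  # _TimedSchema field names assumed
    endTimestamp=1736290799999,
    filters=[
        {"is_event": True, "name": "page_view"},                                  # -> EventSearchSchema
        {"is_event": False, "name": "$browser", "operator": "is", "value": ["Chrome"]},  # -> PropertyFilterSchema
    ],
)
print(type(payload.filters[0]).__name__)  # EventSearchSchema
print(type(payload.filters[1]).__name__)  # PropertyFilterSchema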
@@ -404,6 +404,7 @@ class EventType(str, Enum):
    REQUEST_MOBILE = "requestMobile"
    ERROR_MOBILE = "errorMobile"
    SWIPE_MOBILE = "swipeMobile"
    EVENT = "event"


class PerformanceEventType(str, Enum):

@@ -464,6 +465,7 @@ class SearchEventOperator(str, Enum):
    NOT_CONTAINS = "notContains"
    STARTS_WITH = "startsWith"
    ENDS_WITH = "endsWith"
    PATTERN = "regex"


class ClickEventExtraOperator(str, Enum):

@@ -545,7 +547,66 @@ class RequestGraphqlFilterSchema(BaseModel):
        return values


class SessionSearchEventSchema2(BaseModel):
class EventPredefinedPropertyType(str, Enum):
    TIME = "$time"
    SOURCE = "$source"
    DURATION_S = "$duration_s"
    DESCRIPTION = "description"
    AUTO_CAPTURED = "$auto_captured"
    SDK_EDITION = "$sdk_edition"
    SDK_VERSION = "$sdk_version"
    DEVICE_ID = "$device_id"
    OS = "$os"
    OS_VERSION = "$os_version"
    BROWSER = "$browser"
    BROWSER_VERSION = "$browser_version"
    DEVICE = "$device"
    SCREEN_HEIGHT = "$screen_height"
    SCREEN_WIDTH = "$screen_width"
    CURRENT_URL = "$current_url"
    INITIAL_REFERRER = "$initial_referrer"
    REFERRING_DOMAIN = "$referring_domain"
    REFERRER = "$referrer"
    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
    SEARCH_ENGINE = "$search_engine"
    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
    UTM_SOURCE = "utm_source"
    UTM_MEDIUM = "utm_medium"
    UTM_CAMPAIGN = "utm_campaign"
    COUNTRY = "$country"
    STATE = "$state"
    CITY = "$city"
    ISSUE_TYPE = "issue_type"
    TAGS = "$tags"
    IMPORT = "$import"


class PropertyFilterSchema(BaseModel):
    is_event: Literal[False] = False
    name: Union[EventPredefinedPropertyType, str] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)
    value: List[Union[int, str]] = Field(...)

    # property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)

    @computed_field
    @property
    def is_predefined(self) -> bool:
        return EventPredefinedPropertyType.has_value(self.name)

    @model_validator(mode="after")
    def transform_name(self):
        if isinstance(self.name, Enum):
            self.name = self.name.value
        return self


class EventPropertiesSchema(BaseModel):
    operator: Literal["and", "or"] = Field(...)
    filters: List[PropertyFilterSchema] = Field(...)


class SessionSearchEventSchema(BaseModel):
    is_event: Literal[True] = True
    value: List[Union[str, int]] = Field(...)
    type: Union[EventType, PerformanceEventType] = Field(...)
@@ -553,6 +614,7 @@ class SessionSearchEventSchema2(BaseModel):
    source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
    sourceOperator: Optional[MathOperator] = Field(default=None)
    filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
    properties: Optional[EventPropertiesSchema] = Field(default=None)

    _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
    _single_to_list_values = field_validator('value', mode='before')(single_to_list)

@@ -660,12 +722,12 @@ def add_missing_is_event(values: dict):


# this type is created to allow mixing events&filters and specifying a discriminator
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema],
                              Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]


class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
    events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True)
    filters: List[GroupedFilterType] = Field(default_factory=list)
    sort: str = Field(default="startTs")
    order: SortOrderType = Field(default=SortOrderType.DESC)

@@ -690,6 +752,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    def add_missing_attributes(cls, values):
        # in case isEvent is wrong:
        for f in values.get("filters") or []:
            if f.get("type") is None:
                continue
            if EventType.has_value(f["type"]) and not f.get("isEvent"):
                f["isEvent"] = True
            elif FilterType.has_value(f["type"]) and f.get("isEvent"):

@@ -715,6 +779,15 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
                f["value"] = vals
        return values

    @model_validator(mode="after")
    def check_pa_event_filter(self):
        for v in self.filters + self.events:
            if v.type == EventType.EVENT:
                assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
                    f"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
                assert len(v.value) == 1, "value must contain exactly one value for EVENT type"
        return self

    @model_validator(mode="after")
    def split_filters_events(self):
        n_filters = []
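Given the validator above, an EVENT-type entry carrying two values should fail validation. A hedged sketch; the payload field names and enum string values are assumptions:

try:
    SessionsSearchPayloadSchema(
        startTimestamp=1735599600000,
        endTimestamp=1736290799999,
        filters=[{"type": "event", "isEvent": True, "operator": "is",
                  "value": ["signup", "login"]}],  # two values: should be rejected
    )
except Exception as exc:
    print("rejected:", exc)  # "value must contain exactly one value for EVENT type"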
@@ -1135,7 +1208,7 @@ class CardPathAnalysis(__CardSchema):
    view_type: MetricOtherViewType = Field(...)
    metric_value: List[ProductAnalyticsSelectedEventType] = Field(default_factory=list)
    density: int = Field(default=4, ge=2, le=10)
    rows: int = Field(default=3, ge=1, le=10)
    rows: int = Field(default=5, ge=1, le=10)

    start_type: Literal["start", "end"] = Field(default="start")
    start_point: List[PathAnalysisSubFilterSchema] = Field(default_factory=list)

@@ -1404,7 +1477,7 @@ class MetricSearchSchema(_PaginatedSchema):
    mine_only: bool = Field(default=False)


class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
class _HeatMapSearchEventRaw(SessionSearchEventSchema):
    type: Literal[EventType.LOCATION] = Field(...)


@@ -1529,3 +1602,30 @@ class TagCreate(TagUpdate):

class ScopeSchema(BaseModel):
    scope: int = Field(default=1, ge=1, le=2)


class SessionModel(BaseModel):
    duration: int
    errorsCount: int
    eventsCount: int
    favorite: bool = Field(default=False)
    issueScore: int
    issueTypes: List[IssueType] = Field(default=[])
    metadata: dict = Field(default={})
    pagesCount: int
    platform: str
    projectId: int
    sessionId: str
    startTs: int
    timezone: Optional[str]
    userAnonymousId: Optional[str]
    userBrowser: str
    userCity: str
    userCountry: str
    userDevice: Optional[str]
    userDeviceType: str
    userId: Optional[str]
    userOs: str
    userState: str
    userUuid: str
    viewed: bool = Field(default=False)
@@ -19,14 +19,16 @@ const EVENTS_DEFINITION = {
    }
};
EVENTS_DEFINITION.emit = {
    NEW_AGENT: "NEW_AGENT",
    NO_AGENTS: "NO_AGENT",
    AGENT_DISCONNECT: "AGENT_DISCONNECTED",
    AGENTS_CONNECTED: "AGENTS_CONNECTED",
    NO_SESSIONS: "SESSION_DISCONNECTED",
    SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
    SESSION_RECONNECTED: "SESSION_RECONNECTED",
    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
    NEW_AGENT: "NEW_AGENT",
    NO_AGENTS: "NO_AGENT",
    AGENT_DISCONNECT: "AGENT_DISCONNECTED",
    AGENTS_CONNECTED: "AGENTS_CONNECTED",
    AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
    NO_SESSIONS: "SESSION_DISCONNECTED",
    SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
    SESSION_RECONNECTED: "SESSION_RECONNECTED",
    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
    WEBRTC_CONFIG: "WEBRTC_CONFIG",
};

const BASE_sessionInfo = {
@@ -27,9 +27,14 @@ const respond = function (req, res, data) {
        res.setHeader('Content-Type', 'application/json');
        res.end(JSON.stringify(result));
    } else {
        res.cork(() => {
            res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
        });
        if (!res.aborted) {
            res.cork(() => {
                res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
            });
        } else {
            logger.debug("response aborted");
            return;
        }
    }
    const duration = performance.now() - req.startTs;
    IncreaseTotalRequests();
@@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
};

async function getRoomData(io, roomID) {
    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
    const connected_sockets = await io.in(roomID).fetchSockets();
    if (connected_sockets.length > 0) {
        for (let socket of connected_sockets) {

@@ -52,13 +52,19 @@ async function getRoomData(io, roomID) {
            } else {
                agentsCount++;
                agentIDs.push(socket.id);
                agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
                if (socket.handshake.query.config !== undefined) {
                    config = socket.handshake.query.config;
                }
            }
        }
    } else {
        tabsCount = -1;
        agentsCount = -1;
        agentInfos = [];
        agentIDs = [];
    }
    return {tabsCount, agentsCount, tabIDs, agentIDs};
    return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
}

function processNewSocket(socket) {

@@ -78,7 +84,7 @@ async function onConnect(socket) {
    IncreaseOnlineConnections(socket.handshake.query.identity);

    const io = getServer();
    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
    const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);

    if (socket.handshake.query.identity === IDENTITIES.session) {
        // Check if a session with the same tabID is already connected; if so, refuse the new connection

@@ -100,7 +106,9 @@ async function onConnect(socket) {
            // Inform all connected agents about reconnected session
            if (agentsCount > 0) {
                logger.debug(`notifying new session about agent-existence`);
                io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
                io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
                io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
                socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
            }
        } else if (tabsCount <= 0) {

@@ -118,7 +126,8 @@ async function onConnect(socket) {
            // Stats
            startAssist(socket, socket.handshake.query.agentID);
        }
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
        io.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, socket.handshake.query.config);
        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, { ...socket.handshake.query.agentInfo });
    }

    // Set disconnect handler
@@ -2,44 +2,71 @@ package main

import (
    "context"
    "os"
    "os/signal"
    "syscall"

    analyticsConfig "openreplay/backend/internal/config/analytics"
    "openreplay/backend/pkg/analytics"
    "openreplay/backend/pkg/db/postgres/pool"
    "openreplay/backend/pkg/logger"
    "openreplay/backend/pkg/metrics"
    "openreplay/backend/pkg/metrics/database"
    "openreplay/backend/pkg/metrics/web"
    "openreplay/backend/pkg/server"
    "openreplay/backend/pkg/server/api"
)

func main() {
    ctx := context.Background()
    log := logger.New()
    cfg := analyticsConfig.New(log)
    // Observability
    webMetrics := web.New("analytics")
    dbMetrics := database.New("analytics")
    metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
    log.Info(ctx, "Cacher service started")

    pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
    if err != nil {
        log.Fatal(ctx, "can't init postgres connection: %s", err)
    sigchan := make(chan os.Signal, 1)
    signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)

    for {
        select {
        case sig := <-sigchan:
            log.Error(ctx, "Caught signal %v: terminating", sig)
            os.Exit(0)
        }
    }
    defer pgConn.Close()

    builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
    if err != nil {
        log.Fatal(ctx, "can't init services: %s", err)
    }

    router, err := api.NewRouter(&cfg.HTTP, log)
    if err != nil {
        log.Fatal(ctx, "failed while creating router: %s", err)
    }
    router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
    router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)

    server.Run(ctx, log, &cfg.HTTP, router)
}

//
//import (
//    "context"
//
//    analyticsConfig "openreplay/backend/internal/config/analytics"
//    "openreplay/backend/pkg/analytics"
//    "openreplay/backend/pkg/db/postgres/pool"
//    "openreplay/backend/pkg/logger"
//    "openreplay/backend/pkg/metrics"
//    "openreplay/backend/pkg/metrics/database"
//    "openreplay/backend/pkg/metrics/web"
//    "openreplay/backend/pkg/server"
//    "openreplay/backend/pkg/server/api"
//)
//
//func main() {
//    ctx := context.Background()
//    log := logger.New()
//    cfg := analyticsConfig.New(log)
//    // Observability
//    webMetrics := web.New("analytics")
//    dbMetrics := database.New("analytics")
//    metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
//
//    pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
//    if err != nil {
//        log.Fatal(ctx, "can't init postgres connection: %s", err)
//    }
//    defer pgConn.Close()
//
//    builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
//    if err != nil {
//        log.Fatal(ctx, "can't init services: %s", err)
//    }
//
//    router, err := api.NewRouter(&cfg.HTTP, log)
//    if err != nil {
//        log.Fatal(ctx, "failed while creating router: %s", err)
//    }
//    router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
//    router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
//
//    server.Run(ctx, log, &cfg.HTTP, router)
//}
@@ -111,12 +111,12 @@ var batches = map[string]string{
    "pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
    "issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
    "mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
    "mobile_custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,

@@ -309,6 +309,7 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
        session.UserOSVersion,
        "mouse_thrashing",
        issueID,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("issuesEvents", err)

@@ -365,6 +366,7 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
        session.UserOSVersion,
        msg.Type,
        issueID,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("issuesEvents", err)

@@ -552,6 +554,7 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
        session.Platform,
        session.UserOSVersion,
        msgID,
        cropString(msg.Url),
        jsonString,
    ); err != nil {
        c.checkError("errors", err)
@@ -84,7 +84,10 @@ func (p *poolImpl) Begin() (*Tx, error) {
    tx, err := p.conn.Begin(context.Background())
    p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "")
    p.metrics.IncreaseTotalRequests("begin", "")
    return &Tx{tx, p.metrics}, err
    return &Tx{
        origTx:  tx,
        metrics: p.metrics,
    }, err
}

func (p *poolImpl) Close() {

@@ -94,13 +97,13 @@ func (p *poolImpl) Close() {
// TX - start

type Tx struct {
    pgx.Tx
    origTx  pgx.Tx
    metrics database.Database
}

func (tx *Tx) TxExec(sql string, args ...interface{}) error {
    start := time.Now()
    _, err := tx.Exec(context.Background(), sql, args...)
    _, err := tx.origTx.Exec(context.Background(), sql, args...)
    method, table := methodName(sql)
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
    tx.metrics.IncreaseTotalRequests(method, table)

@@ -109,7 +112,7 @@ func (tx *Tx) TxExec(sql string, args ...interface{}) error {

func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
    start := time.Now()
    res := tx.QueryRow(context.Background(), sql, args...)
    res := tx.origTx.QueryRow(context.Background(), sql, args...)
    method, table := methodName(sql)
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
    tx.metrics.IncreaseTotalRequests(method, table)

@@ -118,7 +121,7 @@ func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {

func (tx *Tx) TxRollback() error {
    start := time.Now()
    err := tx.Rollback(context.Background())
    err := tx.origTx.Rollback(context.Background())
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "")
    tx.metrics.IncreaseTotalRequests("rollback", "")
    return err

@@ -126,7 +129,7 @@ func (tx *Tx) TxRollback() error {

func (tx *Tx) TxCommit() error {
    start := time.Now()
    err := tx.Commit(context.Background())
    err := tx.origTx.Commit(context.Background())
    tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "")
    tx.metrics.IncreaseTotalRequests("commit", "")
    return err
@@ -5,10 +5,11 @@ import (
    "encoding/hex"
    "encoding/json"
    "fmt"
    "github.com/google/uuid"
    "hash/fnv"
    "strconv"

    "github.com/google/uuid"

    . "openreplay/backend/pkg/messages"
)

@@ -23,41 +24,7 @@ type ErrorEvent struct {
    Payload    string
    Tags       map[string]*string
    OriginType int
}

func unquote(s string) string {
    if s[0] == '"' {
        return s[1 : len(s)-1]
    }
    return s
}
func parseTags(tagsJSON string) (tags map[string]*string, err error) {
    if len(tagsJSON) == 0 {
        return nil, fmt.Errorf("empty tags")
    }
    if tagsJSON[0] == '[' {
        var tagsArr []json.RawMessage
        if err = json.Unmarshal([]byte(tagsJSON), &tagsArr); err != nil {
            return
        }

        tags = make(map[string]*string)
        for _, keyBts := range tagsArr {
            tags[unquote(string(keyBts))] = nil
        }
    } else if tagsJSON[0] == '{' {
        var tagsObj map[string]json.RawMessage
        if err = json.Unmarshal([]byte(tagsJSON), &tagsObj); err != nil {
            return
        }

        tags = make(map[string]*string)
        for key, valBts := range tagsObj {
            val := unquote(string(valBts))
            tags[key] = &val
        }
    }
    return
    Url string
}

func WrapJSException(m *JSException) (*ErrorEvent, error) {

@@ -69,6 +36,7 @@ func WrapJSException(m *JSException) (*ErrorEvent, error) {
        Message:    m.Message,
        Payload:    m.Payload,
        OriginType: m.TypeID(),
        Url:        m.Url,
    }, nil
}

@@ -81,6 +49,7 @@ func WrapIntegrationEvent(m *IntegrationEvent) *ErrorEvent {
        Message:    m.Message,
        Payload:    m.Payload,
        OriginType: m.TypeID(),
        Url:        m.Url,
    }
}
@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
|
|||
|
||||
// Add tracker version to context
|
||||
r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
|
||||
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
|
||||
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
|
||||
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
|
||||
return
|
||||
}
|
||||
|
||||
// Handler's logic
|
||||
if req.ProjectKey == nil {
|
||||
|
|
@@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
 	// Add projectID to context
 	r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))
 
+	// Validate tracker version
+	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
+		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
+		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
+		return
+	}
+
 	// Check if the project supports mobile sessions
 	if !p.IsWeb() {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)

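The two hunks above move the tracker-version gate below the project lookup, so a rejected start request is logged with its projectID already in context. A minimal Go sketch of that ordering; getProject, the version comparison, and the route are hypothetical stand-ins for the real handler dependencies:

package main

import (
	"errors"
	"fmt"
	"net/http"
)

// Hypothetical stand-ins; names are illustrative, not the actual handler's.
type project struct{ ID int }

func getProject(key string) (*project, error) { return &project{ID: 42}, nil }

func validateTrackerVersion(v string) error {
	if v < "17" { // placeholder comparison; the real check parses the version properly
		return errors.New("unsupported")
	}
	return nil
}

func startSession(w http.ResponseWriter, r *http.Request) {
	// 1. Resolve the project first, so errors and logs carry project context.
	p, err := getProject(r.URL.Query().Get("projectKey"))
	if err != nil {
		http.Error(w, "project not found", http.StatusNotFound)
		return
	}
	// 2. Only then gate on the tracker version (the check moved down in this PR).
	if err := validateTrackerVersion(r.Header.Get("X-Tracker-Version")); err != nil {
		http.Error(w, "please upgrade the tracker version", http.StatusUpgradeRequired)
		return
	}
	fmt.Fprintf(w, "session started for project %d", p.ID)
}

func main() {
	http.HandleFunc("/v1/web/start", startSession)
	_ = http.ListenAndServe(":8080", nil)
}
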
@@ -29,7 +29,7 @@ type Task struct {
 	Duration int
 	Status   string
 	Path     string
-	tx       pool.Tx
+	tx       *pool.Tx
 }
 
 func (t *Task) HasToTrim() bool {

@@ -65,7 +65,7 @@ func (t *tasksImpl) Get() (task *Task, err error) {
 		}
 	}()
 
-	task = &Task{tx: pool.Tx{Tx: tx}}
+	task = &Task{tx: tx}
 	sql := `SELECT spot_id, crop, duration FROM spots.tasks WHERE status = 'pending' ORDER BY added_time FOR UPDATE SKIP LOCKED LIMIT 1`
 	err = tx.TxQueryRow(sql).Scan(&task.SpotID, &task.Crop, &task.Duration)
 	if err != nil {

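One plausible motivation for switching the tx field from pool.Tx to *pool.Tx: a value field stores a copy of the transaction wrapper, so state changes on the task's copy diverge from the pool's handle, while a pointer shares a single handle. A tiny illustration with invented demo types:

package main

import "fmt"

// Tx mimics a transaction wrapper with mutable state.
type Tx struct{ done bool }

type TaskByValue struct{ tx Tx }
type TaskByPtr struct{ tx *Tx }

func main() {
	orig := &Tx{}

	v := TaskByValue{tx: *orig} // copies the wrapper
	v.tx.done = true
	fmt.Println(orig.done) // false: the copy diverged from the pool's handle

	p := TaskByPtr{tx: orig} // shares the wrapper
	p.tx.done = true
	fmt.Println(orig.done) // true: both sides observe the same state
}
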
@@ -52,6 +52,7 @@ func NewTranscoder(cfg *spot.Config, log logger.Logger, objStorage objectstorage.ObjectStorage, ...) {
 		tasks:   NewTasks(conn),
 		streams: NewStreams(log, conn, objStorage),
 		spots:   spots,
+		metrics: metrics,
 	}
 	tnsc.prepareWorkers = workers.NewPool(2, 4, tnsc.prepare)
 	tnsc.transcodeWorkers = workers.NewPool(2, 4, tnsc.transcode)

ee/api/.gitignore (vendored, 9 lines changed)

@@ -223,11 +223,14 @@ Pipfile.lock
 /chalicelib/core/sessions/performance_event.py
 /chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
 /chalicelib/core/sessions/unprocessed_sessions.py
 /chalicelib/core/sessions/__init__.py
 /chalicelib/core/sessions/sessions_legacy_mobil.py
 /chalicelib/core/sessions/sessions_search_exp.py
 /chalicelib/core/metrics/modules
 /chalicelib/core/socket_ios.py
 /chalicelib/core/sourcemaps.py
 /chalicelib/core/sourcemaps_parser.py
 /chalicelib/core/sourcemaps
 /chalicelib/core/tags.py
 /chalicelib/core/product_analytics
 /chalicelib/saml
 /chalicelib/utils/__init__.py
 /chalicelib/utils/args_transformer.py

@@ -290,3 +293,5 @@ Pipfile.lock
 /chalicelib/core/errors/errors_ch.py
 /chalicelib/core/errors/errors_details.py
 /chalicelib/utils/contextual_validators.py
+/routers/subs/product_analytics.py
+/schemas/product_analytics.py

@@ -6,25 +6,23 @@ name = "pypi"
 [packages]
 urllib3 = "==2.3.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.37.21"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
 clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+elasticsearch = "==8.17.2"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
-fastapi = "==0.115.8"
+cachetools = "==5.5.2"
+fastapi = "==0.115.12"
 uvicorn = {extras = ["standard"], version = "==0.34.0"}
 gunicorn = "==23.0.0"
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.6"}
 apscheduler = "==3.11.0"
 python3-saml = "==1.16.0"
 python-multipart = "==0.0.20"
 redis = "==5.2.1"
-azure-storage-blob = "==12.24.1"
+azure-storage-blob = "==12.25.0"
 
 [dev-packages]

@@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, ee_crons, core_dynamic_crons
 from routers import core, core_dynamic
 from routers import ee
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
 from routers.subs import v1_api_ee
 
 if config("ENABLE_SSO", cast=bool, default=True):

@@ -150,9 +150,9 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)
 
-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/ap")
+app.include_router(product_analytics.app, prefix="/ap")
+app.include_router(product_analytics.app_apikey, prefix="/ap")
 
 if config("ENABLE_SSO", cast=bool, default=True):
     app.include_router(saml.public_app)

Deleted file (chalicelib/core/sessions/__init__.py):

@@ -1,17 +0,0 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search as sessions_search_legacy

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental sessions search")
    from . import sessions_ch as sessions
    from . import sessions_search_exp as sessions_search
else:
    from . import sessions_pg as sessions
    from . import sessions_search as sessions_search

@@ -927,12 +927,12 @@ def authenticate_sso(email: str, internal_id: str):
                                                  aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti),
         "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
         "spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
-                                            iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE),
+                                            iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE, for_spot=True),
         "spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
                                                              tenant_id=r['tenantId'],
                                                              iat=j_r.spot_jwt_refresh_iat,
                                                              aud=spot.AUDIENCE,
-                                                             jwt_jti=j_r.spot_jwt_refresh_jti),
+                                                             jwt_jti=j_r.spot_jwt_refresh_jti, for_spot=True),
         "spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int)
     }
     return response

@@ -44,12 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
 rm -rf ./chalicelib/core/sessions/performance_event.py
 rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
 rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
 rm -rf ./chalicelib/core/sessions/__init__.py
 rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
 rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
 rm -rf ./chalicelib/core/metrics/modules
 rm -rf ./chalicelib/core/socket_ios.py
 rm -rf ./chalicelib/core/sourcemaps.py
 rm -rf ./chalicelib/core/sourcemaps_parser.py
 rm -rf ./chalicelib/core/sourcemaps
 rm -rf ./chalicelib/core/user_testing.py
 rm -rf ./chalicelib/core/tags.py
 rm -rf ./chalicelib/core/product_analytics
 rm -rf ./chalicelib/saml
 rm -rf ./chalicelib/utils/__init__.py
 rm -rf ./chalicelib/utils/args_transformer.py

@@ -110,3 +113,5 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
 rm -rf ./chalicelib/core/errors/errors_ch.py
 rm -rf ./chalicelib/core/errors/errors_details.py
 rm -rf ./chalicelib/utils/contextual_validators.py
+rm -rf ./routers/subs/product_analytics.py
+rm -rf ./schemas/product_analytics.py

@@ -1,19 +1,18 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.21
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
 
-fastapi==0.115.8
+fastapi==0.115.12
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
 apscheduler==3.11.0
 
-azure-storage-blob==12.24.1
+azure-storage-blob==12.25.0

@@ -1,19 +1,18 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.21
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
 
-fastapi==0.115.8
+fastapi==0.115.12
 python-decouple==3.8
 pydantic[email]==2.10.6
 apscheduler==3.11.0
 
 redis==5.2.1
-azure-storage-blob==12.24.1
+azure-storage-blob==12.25.0

@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.21
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
-clickhouse-driver[lz4]==0.2.9
+psycopg[pool,binary]==3.2.6
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
 
-fastapi==0.115.8
+fastapi==0.115.12
 uvicorn[standard]==0.34.0
 gunicorn==23.0.0
 python-decouple==3.8

@@ -19,10 +18,9 @@ apscheduler==3.11.0
 
 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
 python3-saml==1.16.0
 --no-binary=lxml
 
 python-multipart==0.0.20
 
 redis==5.2.1
 #confluent-kafka==2.1.0
-azure-storage-blob==12.24.1
+azure-storage-blob==12.25.0

@@ -1,4 +1,5 @@
 from .schemas import *
 from .schemas_ee import *
 from .assist_stats_schema import *
+from .product_analytics import *
 from . import overrides as _overrides

@@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator
 
 from chalicelib.utils.TimeUTC import TimeUTC
 from . import schemas
-from .overrides import BaseModel, Enum, ORUnion
+from .overrides import BaseModel, Enum
 from .transformers_validators import remove_whitespace
 
 

@@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
         return values
 
 
-class SessionModel(BaseModel):
-    duration: int
-    errorsCount: int
-    eventsCount: int
-    favorite: bool = Field(default=False)
-    issueScore: int
-    issueTypes: List[schemas.IssueType] = Field(default=[])
-    metadata: dict = Field(default={})
-    pagesCount: int
-    platform: str
-    projectId: int
-    sessionId: str
-    startTs: int
-    timezone: Optional[str]
-    userAnonymousId: Optional[str]
-    userBrowser: str
-    userCity: str
-    userCountry: str
-    userDevice: Optional[str]
-    userDeviceType: str
-    userId: Optional[str]
-    userOs: str
-    userState: str
-    userUuid: str
-    viewed: bool = Field(default=False)
-
-
 class AssistRecordUpdatePayloadSchema(BaseModel):
     name: str = Field(..., min_length=1)
     _transform_name = field_validator('name', mode="before")(remove_whitespace)

@@ -83,9 +83,11 @@ if (process.env.uws !== "true") {
     const uWrapper = function (fn) {
         return (res, req) => {
             res.id = 1;
             res.aborted = false;
+            req.startTs = performance.now(); // track request's start timestamp
+            req.method = req.getMethod();
             res.onAborted(() => {
                 res.aborted = true;
                 onAbortedOrFinishedResponse(res);
             });
             return fn(req, res);

@@ -3,20 +3,50 @@ const {getCompressionConfig} = require("./helper");
 const {logger} = require('./logger');
 
 let io;
-const getServer = function () {return io;}
+const getServer = function () {
+    return io;
+}
+const useRedis = process.env.redis === "true";
+let inMemorySocketsCache = [];
+let lastCacheUpdateTime = 0;
+const CACHE_REFRESH_INTERVAL = parseInt(process.env.cacheRefreshInterval) || 5000;
+
+const doFetchAllSockets = async function () {
+    if (useRedis) {
+        const now = Date.now();
+        logger.info(`Using in-memory cache (age: ${now - lastCacheUpdateTime}ms)`);
+        return inMemorySocketsCache;
+    } else {
+        try {
+            return await io.fetchSockets();
+        } catch (error) {
+            logger.error('Error fetching sockets:', error);
+            return [];
+        }
+    }
+}
 
 let redisClient;
-const useRedis = process.env.redis === "true";
+// Background refresher that runs independently of requests
+let cacheRefresher = null;
+function startCacheRefresher() {
+    if (cacheRefresher) clearInterval(cacheRefresher);
+
+    if (useRedis) {
+        const {createClient} = require("redis");
+        const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
+        redisClient = createClient({url: REDIS_URL});
+        redisClient.on("error", (error) => logger.error(`Redis error : ${error}`));
+        void redisClient.connect();
+        cacheRefresher = setInterval(async () => {
+            const now = Date.now();
+            // Only refresh if cache is stale
+            if (now - lastCacheUpdateTime >= CACHE_REFRESH_INTERVAL) {
+                logger.debug('Background refresh triggered');
+                try {
+                    const startTime = performance.now();
+                    const result = await io.fetchSockets();
+                    inMemorySocketsCache = result;
+                    lastCacheUpdateTime = now;
+                    const duration = performance.now() - startTime;
+                    logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
+                } catch (error) {
+                    logger.error(`Background refresh error: ${error}`);
+                }
+            }
+        }, CACHE_REFRESH_INTERVAL / 2);
+    }
+}
 
 const processSocketsList = function (sockets) {

@@ -28,24 +58,6 @@ const processSocketsList = function (sockets) {
     return res
 }
 
-const doFetchAllSockets = async function () {
-    if (useRedis) {
-        try {
-            let cachedResult = await redisClient.get('fetchSocketsResult');
-            if (cachedResult) {
-                return JSON.parse(cachedResult);
-            }
-            let result = await io.fetchSockets();
-            let cachedString = JSON.stringify(processSocketsList(result));
-            await redisClient.set('fetchSocketsResult', cachedString, {EX: 5});
-            return result;
-        } catch (error) {
-            logger.error('Error setting value with expiration:', error);
-        }
-    }
-    return await io.fetchSockets();
-}
-
 const fetchSockets = async function (roomID) {
     if (!io) {
         return [];

@@ -84,6 +96,7 @@ const createSocketIOServer = function (server, prefix) {
         });
         io.attachApp(server);
     }
+    startCacheRefresher();
    return io;
 }
 

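The refactor above replaces the per-request Redis cache of fetchSockets() results with an in-memory snapshot refreshed by a background interval that ticks at half the staleness threshold, so readers never block on the expensive fetch. Since the change itself is JavaScript, here is the same pattern sketched in Go under assumed names (socketCache, fetch):

package main

import (
	"sync"
	"time"
)

// socketCache holds a snapshot refreshed in the background;
// readers just take whatever snapshot is current.
type socketCache struct {
	mu        sync.RWMutex
	snapshot  []string
	updatedAt time.Time
}

func (c *socketCache) get() []string {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return c.snapshot
}

func (c *socketCache) refreshLoop(interval time.Duration, fetch func() []string, stop <-chan struct{}) {
	// Tick at half the staleness threshold, like the JS version,
	// so a refresh lands before readers ever see stale data.
	t := time.NewTicker(interval / 2)
	defer t.Stop()
	for {
		select {
		case <-stop:
			return
		case now := <-t.C:
			c.mu.RLock()
			stale := now.Sub(c.updatedAt) >= interval
			c.mu.RUnlock()
			if !stale {
				continue
			}
			snap := fetch() // the expensive io.fetchSockets() equivalent
			c.mu.Lock()
			c.snapshot, c.updatedAt = snap, now
			c.mu.Unlock()
		}
	}
}

func main() {
	c := &socketCache{}
	stop := make(chan struct{})
	go c.refreshLoop(5*time.Second, func() []string { return []string{"socket-1"} }, stop)
	time.Sleep(6 * time.Second)
	_ = c.get()
	close(stop)
}
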
@@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {
 
 // For the ender service only
 func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
-	rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
+	rows, err := s.db.Query(`
+		SELECT
+			session_id,
+			CASE
+				WHEN duration IS NULL OR duration < 0 THEN 0
+				ELSE duration
+			END,
+			start_ts
+		FROM sessions
+		WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
 	if err != nil {
 		return nil, err
 	}

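The rewritten query replaces COALESCE(duration, 0), which only maps NULL to 0, with a CASE that also clamps negative durations. A hedged Go sketch of the same guard applied after scanning, for comparison:

package main

import "fmt"

// clampDuration mirrors the SQL CASE above: NULL (here: a nil pointer)
// and negative durations both normalize to 0 before use.
func clampDuration(d *int64) int64 {
	if d == nil || *d < 0 {
		return 0
	}
	return *d
}

func main() {
	neg := int64(-250)
	ok := int64(1200)
	fmt.Println(clampDuration(nil))  // 0, like COALESCE(duration, 0)
	fmt.Println(clampDuration(&neg)) // 0, the case COALESCE missed
	fmt.Println(clampDuration(&ok))  // 1200
}
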
ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql (new file, 168 lines)

@@ -0,0 +1,168 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';


DROP TABLE IF EXISTS product_analytics.all_events;
CREATE TABLE IF NOT EXISTS product_analytics.all_events
(
    project_id          UInt16,
    auto_captured       BOOL     DEFAULT FALSE,
    event_name          String,
    display_name        String   DEFAULT '',
    description         String   DEFAULT '',
    event_count_l30days UInt32   DEFAULT 0,
    query_count_l30days UInt32   DEFAULT 0,

    created_at          DateTime64,
    _timestamp          DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, auto_captured, event_name);

CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
    TO product_analytics.all_events AS
SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
       `$auto_captured` AS auto_captured,
       `$event_name`    AS event_name,
       display_name,
       description
FROM product_analytics.events
         LEFT JOIN (SELECT project_id,
                           auto_captured,
                           event_name,
                           display_name,
                           description
                    FROM product_analytics.all_events
                    WHERE all_events.display_name != ''
                       OR all_events.description != '') AS old_data
                   ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
                       events.`$event_name` = old_data.event_name);

CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
    project_id    UInt16,
    event_name    String,
    property_name String,
    value_type    String,

    _timestamp    DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, event_name, property_name, value_type);

CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
    TO product_analytics.event_properties AS
SELECT project_id,
       `$event_name` AS event_name,
       property_name,
       JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;

CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
    TO product_analytics.event_properties AS
SELECT project_id,
       `$event_name` AS event_name,
       property_name,
       JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;

DROP TABLE IF EXISTS product_analytics.all_properties;
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
    project_id        UInt16,
    property_name     String,
    is_event_property BOOL,
    display_name      String   DEFAULT '',
    description       String   DEFAULT '',
    status            String   DEFAULT 'visible' COMMENT 'visible/hidden/dropped',
    data_count        UInt32   DEFAULT 1,
    query_count       UInt32   DEFAULT 0,

    created_at        DateTime64,
    _timestamp        DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, property_name, is_event_property);


CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
    TO product_analytics.all_properties AS
SELECT project_id,
       property_name,
       TRUE AS is_event_property,
       display_name,
       description,
       status,
       data_count,
       query_count
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
         LEFT JOIN (SELECT project_id,
                           property_name,
                           display_name,
                           description,
                           status,
                           data_count,
                           query_count
                    FROM product_analytics.all_properties
                    WHERE (all_properties.display_name != ''
                        OR all_properties.description != '')
                      AND is_event_property) AS old_data
                   ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);

CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
    TO product_analytics.all_properties AS
SELECT project_id,
       property_name,
       TRUE AS is_event_property,
       display_name,
       description,
       status,
       data_count,
       query_count
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
         LEFT JOIN (SELECT project_id,
                           property_name,
                           display_name,
                           description,
                           status,
                           data_count,
                           query_count
                    FROM product_analytics.all_properties
                    WHERE (all_properties.display_name != ''
                        OR all_properties.description != '')
                      AND is_event_property) AS old_data
                   ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);

CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
(
    project_id        UInt16,
    property_name     String,
    is_event_property BOOL,
    value             String,

    _timestamp        DateTime DEFAULT now()
)
    ENGINE = ReplacingMergeTree(_timestamp)
        ORDER BY (project_id, property_name, is_event_property);

CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
    REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
SELECT project_id,
       property_name,
       TRUE AS is_event_property,
       JSONExtractString(toString(`$properties`), property_name) AS value
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
  AND value != ''
LIMIT 2 BY project_id,property_name
UNION ALL
SELECT project_id,
       property_name,
       TRUE AS is_event_property,
       JSONExtractString(toString(`properties`), property_name) AS value
FROM product_analytics.events
         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
  AND value != ''
LIMIT 2 BY project_id,property_name;

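The sampler above feeds product_analytics.property_values_samples, a ReplacingMergeTree, so duplicate rows collapse only at merge time and an exact read needs FINAL. A hedged Go sketch of querying it through the clickhouse-go v2 database/sql bridge; the address and the project_id = 1 filter are illustrative:

package main

import (
	"fmt"
	"log"

	"github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	// Connection details are illustrative; adjust to your deployment.
	db := clickhouse.OpenDB(&clickhouse.Options{Addr: []string{"localhost:9000"}})
	defer db.Close()

	// FINAL forces deduplication at read time for ReplacingMergeTree.
	rows, err := db.Query(`
		SELECT property_name, value
		FROM product_analytics.property_values_samples FINAL
		WHERE project_id = 1 AND is_event_property
		ORDER BY property_name
		LIMIT 20`)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name, value string
		if err := rows.Scan(&name, &value); err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s = %s\n", name, value)
	}
}
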
@@ -1,4 +1,4 @@
-CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
 CREATE DATABASE IF NOT EXISTS experimental;
 
 CREATE TABLE IF NOT EXISTS experimental.autocomplete

@@ -88,7 +88,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMM(datetime)
       ORDER BY (project_id, datetime, event_type, session_id, message_id)
-      TTL datetime + INTERVAL 3 MONTH;
+      TTL datetime + INTERVAL 1 MONTH;
 
 

@@ -140,7 +140,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMMDD(datetime)
       ORDER BY (project_id, datetime, session_id)
-      TTL datetime + INTERVAL 3 MONTH
+      TTL datetime + INTERVAL 1 MONTH
     SETTINGS index_granularity = 512;
 
 CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions

@@ -189,7 +189,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMM(_timestamp)
       ORDER BY (project_id, issue_id, type)
-      TTL _timestamp + INTERVAL 3 MONTH;
+      TTL _timestamp + INTERVAL 1 MONTH;
 
 

@@ -330,7 +330,7 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMM(datetime)
       ORDER BY (project_id, datetime, event_type, session_id, message_id)
-      TTL datetime + INTERVAL 3 MONTH;
+      TTL datetime + INTERVAL 1 MONTH;
 
 
 SET allow_experimental_json_type = 1;

@ -639,9 +639,11 @@ CREATE TABLE IF NOT EXISTS product_analytics.group_properties
|
|||
|
||||
|
||||
-- The full list of events
|
||||
-- Experimental: This table is filled by an incremental materialized view
|
||||
CREATE TABLE IF NOT EXISTS product_analytics.all_events
|
||||
(
|
||||
project_id UInt16,
|
||||
auto_captured BOOL DEFAULT FALSE,
|
||||
event_name String,
|
||||
display_name String DEFAULT '',
|
||||
description String DEFAULT '',
|
||||
|
|
@@ -651,10 +653,68 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
     created_at          DateTime64,
     _timestamp          DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
-      ORDER BY (project_id, event_name);
+      ORDER BY (project_id, auto_captured, event_name);
 
+-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
+-- Incremental materialized view to fill all_events using $properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
+    TO product_analytics.all_events AS
+SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
+       `$auto_captured` AS auto_captured,
+       `$event_name`    AS event_name,
+       display_name,
+       description
+FROM product_analytics.events
+         LEFT JOIN (SELECT project_id,
+                           auto_captured,
+                           event_name,
+                           display_name,
+                           description
+                    FROM product_analytics.all_events
+                    WHERE all_events.display_name != ''
+                       OR all_events.description != '') AS old_data
+                   ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
+                       events.`$event_name` = old_data.event_name);
+-- -------- END ---------
 
+-- The full list of event-properties (used to tell which property belongs to which event)
+-- Experimental: This table is filled by an incremental materialized view
+CREATE TABLE IF NOT EXISTS product_analytics.event_properties
+(
+    project_id    UInt16,
+    event_name    String,
+    property_name String,
+    value_type    String,
+
+    _timestamp    DateTime DEFAULT now()
+) ENGINE = ReplacingMergeTree(_timestamp)
+      ORDER BY (project_id, event_name, property_name, value_type);
+
+-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
+-- Incremental materialized view to fill event_properties using $properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
+    TO product_analytics.event_properties AS
+SELECT project_id,
+       `$event_name` AS event_name,
+       property_name,
+       JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;
+
+-- Incremental materialized view to fill event_properties using properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
+    TO product_analytics.event_properties AS
+SELECT project_id,
+       `$event_name` AS event_name,
+       property_name,
+       JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;
+-- -------- END ---------
+
 
 -- The full list of properties (events and users)
+-- Experimental: This table is filled by an incremental materialized view
 CREATE TABLE IF NOT EXISTS product_analytics.all_properties
 (
     project_id        UInt16,

@@ -670,3 +730,95 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_properties
     _timestamp        DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
       ORDER BY (project_id, property_name, is_event_property);
+
+
+-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
+-- Incremental materialized view to fill all_properties using $properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
+    TO product_analytics.all_properties AS
+SELECT project_id,
+       property_name,
+       TRUE AS is_event_property,
+       display_name,
+       description,
+       status,
+       data_count,
+       query_count
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
+         LEFT JOIN (SELECT project_id,
+                           property_name,
+                           display_name,
+                           description,
+                           status,
+                           data_count,
+                           query_count
+                    FROM product_analytics.all_properties
+                    WHERE (all_properties.display_name != ''
+                        OR all_properties.description != '')
+                      AND is_event_property) AS old_data
+                   ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
+
+-- Incremental materialized view to fill all_properties using properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
+    TO product_analytics.all_properties AS
+SELECT project_id,
+       property_name,
+       TRUE AS is_event_property,
+       display_name,
+       description,
+       status,
+       data_count,
+       query_count
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
+         LEFT JOIN (SELECT project_id,
+                           property_name,
+                           display_name,
+                           description,
+                           status,
+                           data_count,
+                           query_count
+                    FROM product_analytics.all_properties
+                    WHERE (all_properties.display_name != ''
+                        OR all_properties.description != '')
+                      AND is_event_property) AS old_data
+                   ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
+-- -------- END ---------
+
+-- Some random examples of property-values, limited by 2 per property
+-- Experimental: This table is filled by a refreshable materialized view
+CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
+(
+    project_id        UInt16,
+    property_name     String,
+    is_event_property BOOL,
+    value             String,
+
+    _timestamp        DateTime DEFAULT now()
+)
+    ENGINE = ReplacingMergeTree(_timestamp)
+        ORDER BY (project_id, property_name, is_event_property);
+-- Incremental materialized view to get random examples of property values using $properties & properties
+CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
+    REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
+SELECT project_id,
+       property_name,
+       TRUE AS is_event_property,
+       JSONExtractString(toString(`$properties`), property_name) AS value
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
+WHERE randCanonical() < 0.5 -- This randomly skips inserts
+  AND value != ''
+LIMIT 2 BY project_id,property_name
+UNION ALL
+-- using union because each table should be the target of 1 single refreshable MV
+SELECT project_id,
+       property_name,
+       TRUE AS is_event_property,
+       JSONExtractString(toString(`properties`), property_name) AS value
+FROM product_analytics.events
+         ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
+WHERE randCanonical() < 0.5 -- This randomly skips inserts
+  AND value != ''
+LIMIT 2 BY project_id,property_name;
+-- -------- END ---------

ee/scripts/schema/db/init_dbs/postgresql/1.23.0/1.23.0.sql (new file, 30 lines)

@@ -0,0 +1,30 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version()                       AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version'     AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec

--


COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

@@ -1,4 +1,4 @@
-\set or_version 'v1.22.0-ee'
+\set or_version 'v1.23.0-ee'
 SET client_min_messages TO NOTICE;
 \set ON_ERROR_STOP true
 SELECT EXISTS (SELECT 1

@@ -0,0 +1,3 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';

DROP TABLE IF EXISTS product_analytics.event_properties;

@@ -0,0 +1,27 @@
\set previous_version 'v1.23.0-ee'
\set next_version 'v1.22.0-ee'
SELECT openreplay_version()                       AS current_version,
       openreplay_version() = :'previous_version' AS valid_previous,
       openreplay_version() = :'next_version'     AS is_next
\gset

\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec

COMMIT;

\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

@@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
 MINIO_SECRET_KEY = ''
 
 # APP and TRACKER VERSIONS
-VERSION = 1.22.0
-TRACKER_VERSION = '16.0.1'
+VERSION = 1.23.0
+TRACKER_VERSION = '17.0.0'

@@ -125,13 +125,13 @@ function PrivateRoutes() {
   }, [siteId]);
 
   React.useEffect(() => {
-    debounceSearch = debounce(() => searchStore.fetchSessions(), 500);
+    debounceSearch = debounce(() => searchStore.fetchSessions(), 250);
   }, []);
 
   React.useEffect(() => {
     if (!searchStore.urlParsed) return;
     debounceSearch();
-  }, [searchStore.instance.filters, searchStore.instance.eventsOrder]);
+  }, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
 
   return (
     <Suspense fallback={<Loader loading className="flex-1" />}>

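The hunk above halves the search debounce to 250 ms and re-keys the effect on urlParsed. For readers unfamiliar with the debounce contract — calls are delayed until the interval passes without another call — a minimal Go sketch of the same idea:

package main

import (
	"fmt"
	"sync"
	"time"
)

// debounce returns a function that delays calls to fn until d has elapsed
// without another call, the contract behind the 250ms debounce above.
func debounce(d time.Duration, fn func()) func() {
	var mu sync.Mutex
	var timer *time.Timer
	return func() {
		mu.Lock()
		defer mu.Unlock()
		if timer != nil {
			timer.Stop()
		}
		timer = time.AfterFunc(d, fn)
	}
}

func main() {
	search := debounce(250*time.Millisecond, func() { fmt.Println("fetchSessions()") })
	for i := 0; i < 5; i++ {
		search() // five rapid triggers collapse into one fetch
		time.Sleep(50 * time.Millisecond)
	}
	time.Sleep(300 * time.Millisecond)
}
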
@@ -1,7 +1,7 @@
 import React, { useState, useEffect } from 'react';
 import cn from 'classnames';
 import Counter from 'App/components/shared/SessionItem/Counter';
-import Draggable from 'react-draggable';
+import { useDraggable } from '@neodrag/react';
 import type { LocalStream } from 'Player';
 import { PlayerContext } from 'App/components/Session/playerContext';
 import ChatControls from '../ChatControls/ChatControls';

@@ -25,6 +25,8 @@ function ChatWindow({
   isPrestart,
 }: Props) {
   const { t } = useTranslation();
+  const dragRef = React.useRef<HTMLDivElement>(null);
+  useDraggable(dragRef, { bounds: 'body', defaultPosition: { x: 50, y: 200 } })
   const { player } = React.useContext(PlayerContext);
 
   const { toggleVideoLocalStream } = player.assistManager;

@@ -39,11 +41,7 @@ function ChatWindow({
   }, [localVideoEnabled]);
 
   return (
-    <Draggable
-      handle=".handle"
-      bounds="body"
-      defaultPosition={{ x: 50, y: 200 }}
-    >
+    <div ref={dragRef}>
       <div
         className={cn(stl.wrapper, 'fixed radius bg-white shadow-xl mt-16')}
         style={{ width: '280px' }}

@@ -102,7 +100,7 @@ function ChatWindow({
         isPrestart={isPrestart}
       />
       </div>
-    </Draggable>
+    </div>
   );
 }
 

@@ -82,7 +82,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
     { stream: MediaStream; isAgent: boolean }[] | null
   >([]);
   const [localStream, setLocalStream] = useState<LocalStream | null>(null);
-  const [callObject, setCallObject] = useState<{ end: () => void } | null>(
+  const [callObject, setCallObject] = useState<{ end: () => void } | null | undefined>(
     null,
   );
 

@@ -135,6 +135,7 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
   }, [peerConnectionStatus]);
 
   const addIncomeStream = (stream: MediaStream, isAgent: boolean) => {
+    if (!stream.active) return;
     setIncomeStream((oldState) => {
       if (oldState === null) return [{ stream, isAgent }];
       if (

@@ -149,13 +150,8 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
     });
   };
 
-  const removeIncomeStream = (stream: MediaStream) => {
-    setIncomeStream((prevState) => {
-      if (!prevState) return [];
-      return prevState.filter(
-        (existingStream) => existingStream.stream.id !== stream.id,
-      );
-    });
+  const removeIncomeStream = () => {
+    setIncomeStream([]);
   };
 
   function onReject() {

@@ -181,7 +177,12 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
       () => {
         player.assistManager.ping(AssistActionsPing.call.end, agentId);
         lStream.stop.apply(lStream);
-        removeIncomeStream(lStream.stream);
+        removeIncomeStream();
+      },
+      () => {
+        player.assistManager.ping(AssistActionsPing.call.end, agentId);
+        lStream.stop.apply(lStream);
+        removeIncomeStream();
       },
       onReject,
       onError,

@@ -34,43 +34,40 @@ function VideoContainer({
     }
     const iid = setInterval(() => {
       const track = stream.getVideoTracks()[0];
-      const settings = track?.getSettings();
-      const isDummyVideoTrack = settings
-        ? settings.width === 2 ||
-          settings.frameRate === 0 ||
-          (!settings.frameRate && !settings.width)
-        : true;
-      const shouldBeEnabled = track.enabled && !isDummyVideoTrack;
-
-      if (isEnabled !== shouldBeEnabled) {
-        setEnabled(shouldBeEnabled);
-        setRemoteEnabled?.(shouldBeEnabled);
+      if (track) {
+        if (!track.enabled) {
+          setEnabled(false);
+          setRemoteEnabled?.(false);
+        } else {
+          setEnabled(true);
+          setRemoteEnabled?.(true);
+        }
+      } else {
+        setEnabled(false);
+        setRemoteEnabled?.(false);
       }
     }, 500);
     return () => clearInterval(iid);
-  }, [stream, isEnabled]);
+  }, [stream]);
 
   return (
     <div
       className="flex-1"
       style={{
-        display: isEnabled ? undefined : 'none',
-        width: isEnabled ? undefined : '0px!important',
-        height: isEnabled ? undefined : '0px!important',
+        height: isEnabled ? undefined : '0px !important',
         border: '1px solid grey',
         transform: local ? 'scaleX(-1)' : undefined,
+        display: isEnabled ? 'block' : 'none',
       }}
     >
-      <video autoPlay ref={ref} muted={muted} style={{ height }} />
+      {isAgent ? (
+        <div
+          style={{
+            position: 'absolute',
+          }}
+        >
+          {t('Agent')}
+        </div>
+      ) : null}
+      <video
+        autoPlay
+        ref={ref}
+        muted={muted}
+        style={{ height }}
+      />
     </div>
   );
 }

@@ -16,10 +16,10 @@ function ProfilerDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
 
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerProfiler from '@openreplay/tracker-profiler';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
 const fn = profiler('call_name')(() => {
   //...
 }, thisArg); // thisArg is optional`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
+
 import trackerProfiler from '@openreplay/tracker-profiler/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...

@@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';
 
 function AssistNpm(props) {
   const { t } = useTranslation();
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerAssist from '@openreplay/tracker-assist';
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${props.projectKey}',
 });
 tracker.start()
 
 tracker.use(trackerAssist(options)); // check the list of available options below`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerAssist from '@openreplay/tracker-assist/cjs';
-const tracker = new OpenReplay({
+
+tracker.configure({
   projectKey: '${props.projectKey}'
 });
 const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below

@@ -14,19 +14,20 @@ function GraphQLDoc() {
   const projectKey = siteId
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerGraphQL from '@openreplay/tracker-graphql';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
 //...
 export const recordGraphQL = tracker.use(trackerGraphQL());`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...

@@ -15,20 +15,21 @@ function MobxDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
 
-  const mobxUsage = `import OpenReplay from '@openreplay/tracker';
+  const mobxUsage = `import { tracker } from '@openreplay/tracker';
 import trackerMobX from '@openreplay/tracker-mobx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below
 tracker.start();
 `;
 
-  const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerMobX from '@openreplay/tracker-mobx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below

@@ -16,10 +16,10 @@ function NgRxDoc() {
     : sites[0]?.projectKey;
   const usage = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerNgRx from '@openreplay/tracker-ngrx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
 export class AppModule {}`;
   const usageCjs = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...

@@ -17,10 +17,10 @@ function PiniaDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@@ -16,10 +16,10 @@ function ReduxDoc() {
     : sites[0]?.projectKey;
 
   const usage = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerRedux from '@openreplay/tracker-redux';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@@ -29,10 +29,11 @@ const store = createStore(
   applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
 );`;
   const usageCjs = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerRedux from '@openreplay/tracker-redux/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...

@@ -16,10 +16,10 @@ function VueDoc() {
     : sites[0]?.projectKey;
 
   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()

@@ -29,10 +29,11 @@ const store = new Vuex.Store({
   plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
 });`;
   const usageCjs = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerVuex from '@openreplay/tracker-vuex/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...

@@ -16,11 +16,10 @@ function ZustandDoc(props) {
     : sites[0]?.projectKey;
 
   const usage = `import create from "zustand";
-import Tracker from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';
 
-
-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });
 

@@ -43,11 +42,12 @@ const useBearStore = create(
 )
 `;
   const usageCjs = `import create from "zustand";
-import Tracker from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';
 
 
-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });
 

@@ -24,7 +24,7 @@ function ModuleCard(props: Props) {
         <Switch
           size="small"
           checked={!module.isEnabled}
-          title={module.isEnabled ? 'Enabled' : 'Disabled'}
+          title={!module.isEnabled ? 'Enabled' : 'Disabled'}
           onChange={() => props.onToggle(module)}
         />
       </div>

@@ -40,11 +40,12 @@ function Modules() {
   };
 
   useEffect(() => {
-    list(t).forEach((module) => {
+    const moduleList = list(t)
+    moduleList.forEach((module) => {
       module.isEnabled = modules.includes(module.key);
     });
     setModulesState(
-      list(t).filter(
+      moduleList.filter(
         (module) => !module.hidden && (!module.enterprise || isEnterprise),
       ),
     );

@@ -3,6 +3,7 @@ import withPageTitle from 'HOCs/withPageTitle';
 import { PageTitle } from 'UI';
 import { observer } from 'mobx-react-lite';
 import { useStore } from 'App/mstore';
+import LanguageSwitcher from "App/components/LanguageSwitcher";
 import Settings from './Settings';
 import ChangePassword from './ChangePassword';
 import styles from './profileSettings.module.css';

@@ -20,107 +21,90 @@ function ProfileSettings() {
   return (
     <div className="bg-white rounded-lg border shadow-sm p-5">
       <PageTitle title={<div>{t('Account')}</div>} />
-      <div className="flex items-center">
-        <div className={styles.left}>
-          <h4 className="text-lg mb-4">{t('Profile')}</h4>
-          <div className={styles.info}>
-            {t(
-              'Your email address is your identity on OpenReplay and is used to login.',
-            )}
-          </div>
-        </div>
-        <div>
-          <Settings />
-        </div>
-      </div>
+      <Section
+        title={t('Profile')}
+        description={t('Your email address is your identity on OpenReplay and is used to login.')}
+        children={<Settings />}
+      />
 
       <div className="border-b my-10" />
 
       {account.hasPassword && (
         <>
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Change Password')}</h4>
-              <div className={styles.info}>
-                {t('Updating your password from time to time enhances your account’s security.')}
-              </div>
-            </div>
-            <div>
-              <ChangePassword />
-            </div>
-          </div>
+          <Section
+            title={t('Change Password')}
+            description={t('Updating your password from time to time enhaces your account’s security')}
+            children={<ChangePassword />}
+          />
 
           <div className="border-b my-10" />
         </>
       )}
 
-      <div className="flex items-center">
-        <div className={styles.left}>
-          <h4 className="text-lg mb-4">{t('Organization API Key')}</h4>
-          <div className={styles.info}>
-            {t('Your API key gives you access to an extra set of services.')}
-          </div>
-        </div>
-        <div>
-          <Api />
-        </div>
-      </div>
+      <Section
+        title={t('Interface Language')}
+        description={t('Select the language in which OpenReplay will appear.')}
+        children={<LanguageSwitcher />}
+      />
+
+      <Section
+        title={t('Organization API Key')}
+        description={t('Your API key gives you access to an extra set of services.')}
+        children={<Api />}
+      />
 
       {isEnterprise && (account.admin || account.superAdmin) && (
         <>
           <div className="border-b my-10" />
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Tenant Key')}</h4>
-              <div className={styles.info}>
-                {t('For SSO (SAML) authentication.')}
-              </div>
-            </div>
-            <div>
-              <TenantKey />
-            </div>
-          </div>
+          <Section
+            title={t('Tenant Key')}
+            description={t('For SSO (SAML) authentication.')}
+            children={<TenantKey />}
+          />
         </>
       )}
 
       {!isEnterprise && (
         <>
           <div className="border-b my-10" />
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Data Collection')}</h4>
-              <div className={styles.info}>
-                {t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
-              </div>
-            </div>
-            <div>
-              <OptOut />
-            </div>
-          </div>
+          <Section
+            title={t('Data Collection')}
+            description={t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
+            children={<OptOut />}
+          />
         </>
       )}
 
       {account.license && (
         <>
           <div className="border-b my-10" />
 
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('License')}</h4>
-              <div className={styles.info}>
-                {t('License key and expiration date.')}
-              </div>
-            </div>
-            <div>
-              <Licenses />
-            </div>
-          </div>
+          <Section title={t('License')} description={t('License key and expiration date.')} children={<Licenses />} />
         </>
       )}
     </div>
   );
 }
 
+function Section({ title, description, children }: {
+  title: string;
+  description: string;
+  children: React.ReactNode;
+}) {
+  return (
+    <div className="flex items-center">
+      <div className={styles.left}>
+        <h4 className="text-lg mb-4">{title}</h4>
+        <div className={styles.info}>
+          {description}
+        </div>
+      </div>
+      <div>
+        {children}
+      </div>
+    </div>
+  )
+}
+
 export default withPageTitle('Account - OpenReplay Preferences')(
   observer(ProfileSettings),
 );

Deleted file (Scale component):

@@ -1,32 +0,0 @@
import React from 'react';
import cn from 'classnames';
import { Styles } from '../../common';
import stl from './scale.module.css';
import { useTranslation } from 'react-i18next';

function Scale({ colors }) {
  const { t } = useTranslation();
  const lastIndex = Styles.compareColors.length - 1;

  return (
    <div className={cn(stl.bars, 'absolute bottom-0 mb-4')}>
      {Styles.compareColors.map((c, i) => (
        <div
          key={i}
          style={{
            backgroundColor: c,
            width: '6px',
            height: '15px',
            marginBottom: '1px',
          }}
          className="flex items-center justify-center"
        >
          {i === 0 && <div className="text-xs pl-12">{t('Slow')}</div>}
          {i === lastIndex && <div className="text-xs pl-12">{t('Fast')}</div>}
        </div>
      ))}
    </div>
  );
}

export default Scale;

Deleted file (SpeedIndexByLocation stylesheet):

@@ -1,55 +0,0 @@
.maps {
  height: auto;
  width: 110%;
  stroke: $gray-medium;
  stroke-width: 1;
  stroke-linecap: round;
  stroke-linejoin: round;
  margin-top: -20px;
}

.location {
  fill: $gray-light !important;
  cursor: pointer;
  stroke: #fff;

  &:focus,
  &:hover {
    fill: #2E3ECC !important;
    outline: 0;
  }
}

.heat_index0 {
  fill: $gray-light !important;
}

.heat_index5 {
  fill: #B0B8FF !important;
}

.heat_index4 {
  fill: #6171FF !important;
}

.heat_index3 {
  fill: #394EFF !important;
}

.heat_index2 {
  fill: #2E3ECC !important;
}

.heat_index1 {
  fill: #222F99 !important;
}

.tooltip {
  position: fixed;
  padding: 5px;
  border: 1px solid $gray-light;
  border-radius: 3px;
  background-color: white;
  font-size: 12px;
  line-height: 1.2;
}

@@ -1,134 +0,0 @@
import React from 'react';
import { NoContent } from 'UI';
import { observer } from 'mobx-react-lite';
import { numberWithCommas, positionOfTheNumber } from 'App/utils';
import WorldMap from '@svg-maps/world';
import { SVGMap } from 'react-svg-map';
import cn from 'classnames';
import { NO_METRIC_DATA } from 'App/constants/messages';
import { InfoCircleOutlined } from '@ant-design/icons';
import stl from './SpeedIndexByLocation.module.css';
import Scale from './Scale';
import { Styles, AvgLabel } from '../../common';
import { useTranslation } from 'react-i18next';

interface Props {
  data?: any;
}

function SpeedIndexByLocation(props: Props) {
  const { t } = useTranslation();
  const { data } = props;
  const wrapper: any = React.useRef(null);
  const [tooltipStyle, setTooltipStyle] = React.useState({ display: 'none' });
  const [pointedLocation, setPointedLocation] = React.useState<any>(null);

  const dataMap: any = React.useMemo(() => {
    const _data: any = {};
    const max = data.chart?.reduce(
      (acc: any, item: any) => Math.max(acc, item.value),
      0,
    );
    const min = data.chart?.reduce(
      (acc: any, item: any) => Math.min(acc, item.value),
      0,
    );
    data.chart?.forEach((item: any) => {
      if (!item || !item.userCountry) {
        return;
      }
      item.perNumber = positionOfTheNumber(min, max, item.value, 5);
      _data[item.userCountry.toLowerCase()] = item;
    });
    return _data;
  }, [data.chart]);

  const getLocationClassName = (location: any) => {
    const i = dataMap[location.id] ? dataMap[location.id].perNumber : 0;
    const cls = stl[`heat_index${i}`];
    return cn(stl.location, cls);
  };

  const getLocationName = (event: any) => {
    if (!event) return null;
    const id = event.target.attributes.id.value;
    const name = event.target.attributes.name.value;
    const percentage = dataMap[id] ? dataMap[id].perNumber : 0;
    return { name, id, percentage };
  };

  const handleLocationMouseOver = (event: any) => {
    const pointedLocation = getLocationName(event);
    setPointedLocation(pointedLocation);
  };

  const handleLocationMouseOut = () => {
    setTooltipStyle({ display: 'none' });
    setPointedLocation(null);
  };

  const handleLocationMouseMove = (event: any) => {
    const tooltipStyle = {
      display: 'block',
      top: event.clientY + 10,
      left: event.clientX - 100,
    };
    setTooltipStyle(tooltipStyle);
  };

  return (
    <NoContent
      size="small"
      show={false}
      style={{ height: '240px' }}
      title={
        <div className="flex items-center gap-2 text-base font-normal">
          <InfoCircleOutlined size={12} /> {NO_METRIC_DATA}
        </div>
      }
    >
      <div className="absolute right-0 mr-4 top-0 w-full flex justify-end">
        <AvgLabel text="Avg" count={Math.round(data.value)} unit="ms" />
      </div>
      <Scale colors={Styles.compareColors} />
      <div className="map-target" />
      <div
        style={{
          height: '234px',
          width: '100%',
          margin: '0 auto',
          display: 'flex',
        }}
        ref={wrapper}
      >
        <SVGMap
          map={WorldMap}
          className={stl.maps}
          locationClassName={getLocationClassName}
          onLocationMouseOver={handleLocationMouseOver}
          onLocationMouseOut={handleLocationMouseOut}
          onLocationMouseMove={handleLocationMouseMove}
        />
      </div>
      <div className={stl.tooltip} style={tooltipStyle}>
        {pointedLocation && (
          <>
            <div>{pointedLocation.name}</div>
            <div>
              {t('Avg:')}{' '}
              <strong>
                {dataMap[pointedLocation.id]
                  ? numberWithCommas(
                      parseInt(dataMap[pointedLocation.id].value),
                    )
                  : 0}
              </strong>
            </div>
          </>
        )}
      </div>
    </NoContent>
  );
}

export default observer(SpeedIndexByLocation);

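The deleted widget above derives its heat buckets from positionOfTheNumber in 'App/utils', whose source is not part of this diff. A minimal sketch of what such a helper could look like, assuming it maps a value into equal-width bands between min and max; only the name, signature, and call site come from the code above, the body is a guess:

// Hypothetical sketch of positionOfTheNumber — not the actual 'App/utils' source.
// Buckets `value` within [min, max] into one of `steps` equal-width bands (1..steps);
// the widget then resolves the result to a CSS class such as `heat_index3`.
export function positionOfTheNumber(
  min: number,
  max: number,
  value: number,
  steps: number,
): number {
  if (max === min) return steps; // degenerate range: a single band fits all
  const ratio = (value - min) / (max - min); // normalize to 0..1
  return Math.min(steps, Math.max(1, Math.ceil(ratio * steps)));
}
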
@@ -1 +0,0 @@
export { default } from './SpeedIndexByLocation';

@@ -1,11 +0,0 @@
.bars {
  & div:first-child {
    border-top-left-radius: 3px;
    border-top-right-radius: 3px;
  }

  & div:last-child {
    border-bottom-left-radius: 3px;
    border-bottom-right-radius: 3px;
  }
}

@@ -1,92 +0,0 @@
import React from 'react';
import ExCard from 'Components/Dashboard/components/DashboardList/NewDashModal/Examples/ExCard';
import InsightsCard from 'Components/Dashboard/Widgets/CustomMetricsWidgets/InsightsCard';
import { InsightIssue } from 'App/mstore/types/widget';
import SessionsPerBrowser from 'Components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import SpeedIndexByLocation from 'Components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation';

interface Props {
  title: string;
  type: string;
  onCard: (card: string) => void;
}

function SpeedIndexByLocationExample(props: Props) {
  const data = {
    value: 1480,
    chart: [
      { userCountry: 'AT', value: 415 },
      { userCountry: 'PL', value: 433.1666666666667 },
      { userCountry: 'FR', value: 502 },
      { userCountry: 'IT', value: 540.4117647058823 },
      { userCountry: 'TH', value: 662.0 },
      { userCountry: 'ES', value: 740.5454545454545 },
      { userCountry: 'SG', value: 889.6666666666666 },
      { userCountry: 'TW', value: 1008.0 },
      { userCountry: 'HU', value: 1027.0 },
      { userCountry: 'DE', value: 1054.4583333333333 },
      { userCountry: 'BE', value: 1126.0 },
      { userCountry: 'TR', value: 1174.0 },
      { userCountry: 'US', value: 1273.3015873015872 },
      { userCountry: 'GB', value: 1353.8095238095239 },
      { userCountry: 'VN', value: 1473.8181818181818 },
      { userCountry: 'HK', value: 1654.6666666666667 },
    ],
    unit: 'ms',
  };
  return (
    <ExCard {...props}>
      <SpeedIndexByLocation data={data} />
    </ExCard>
  );
}

export default SpeedIndexByLocationExample;

@@ -68,7 +68,7 @@ function MetricsList({
  }, [metricStore]);

  const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== 'all';
  const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== '';

  const searchImageDimensions = { width: 60, height: 'auto' };
  const defaultImageDimensions = { width: 600, height: 'auto' };

@@ -200,7 +200,6 @@ function WidgetChart(props: Props) {
    const payload = {
      ...params,
      ..._metric.toJson(),
      viewType: 'lineChart',
    };
    fetchMetricChartData(
      _metric,

@@ -11,6 +11,7 @@ import { useTranslation } from 'react-i18next';
const initTableProps = [
  {
    title: <span className="font-medium">Series</span>,
    _pureTitle: 'Series',
    dataIndex: 'seriesName',
    key: 'seriesName',
    sorter: (a, b) => a.seriesName.localeCompare(b.seriesName),

@@ -18,6 +19,7 @@ const initTableProps = [
  },
  {
    title: <span className="font-medium">Avg.</span>,
    _pureTitle: 'Avg.',
    dataIndex: 'average',
    key: 'average',
    sorter: (a, b) => a.average - b.average,

@@ -94,6 +96,8 @@ function WidgetDatatable(props: Props) {
      tableCols.push({
        title: <span className="font-medium">{name}</span>,
        dataIndex: `${name}_${i}`,
        // @ts-ignore
        _pureTitle: name,
        key: `${name}_${i}`,
        sorter: (a, b) => a[`${name}_${i}`] - b[`${name}_${i}`],
      });

@@ -66,8 +66,23 @@ export default observer(WidgetFormNew);

const FilterSection = observer(
  ({ layout, metric, excludeFilterKeys, excludeCategory }: any) => {
    const isTable = metric.metricType === TABLE;
    const isHeatMap = metric.metricType === HEATMAP;
    const isFunnel = metric.metricType === FUNNEL;
    const isInsights = metric.metricType === INSIGHTS;
    const isPathAnalysis = metric.metricType === USER_PATH;
    const isRetention = metric.metricType === RETENTION;
    const canAddSeries = metric.series.length < 3;

    const isSingleSeries =
      isTable ||
      isFunnel ||
      isHeatMap ||
      isInsights ||
      isRetention ||
      isPathAnalysis;
    const { t } = useTranslation();
    const allOpen = layout.startsWith('flex-row');
    const allOpen = isSingleSeries || layout.startsWith('flex-row');
    const defaultClosed = React.useRef(!allOpen && metric.exists());
    const [seriesCollapseState, setSeriesCollapseState] = React.useState<
      Record<number, boolean>

@@ -84,21 +99,6 @@ const FilterSection = observer(
      });
      setSeriesCollapseState(defaultSeriesCollapseState);
    }, [metric.series]);
    const isTable = metric.metricType === TABLE;
    const isHeatMap = metric.metricType === HEATMAP;
    const isFunnel = metric.metricType === FUNNEL;
    const isInsights = metric.metricType === INSIGHTS;
    const isPathAnalysis = metric.metricType === USER_PATH;
    const isRetention = metric.metricType === RETENTION;
    const canAddSeries = metric.series.length < 3;

    const isSingleSeries =
      isTable ||
      isFunnel ||
      isHeatMap ||
      isInsights ||
      isRetention ||
      isPathAnalysis;

    const collapseAll = () => {
      setSeriesCollapseState((seriesCollapseState) => {

@@ -18,7 +18,6 @@ import SessionsImpactedBySlowRequests from 'App/components/Dashboard/Widgets/Pre
import SessionsPerBrowser from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import { FilterKey } from 'Types/filter/filterType';
import CallWithErrors from '../../Widgets/PredefinedWidgets/CallWithErrors';
import SpeedIndexByLocation from '../../Widgets/PredefinedWidgets/SpeedIndexByLocation';
import ResponseTimeDistribution from '../../Widgets/PredefinedWidgets/ResponseTimeDistribution';
import { useTranslation } from 'react-i18next';

@@ -49,8 +48,6 @@ function WidgetPredefinedChart(props: Props) {
      return <CallsErrors5xx data={data} metric={metric} />;
    case FilterKey.CALLS_ERRORS:
      return <CallWithErrors isTemplate={isTemplate} data={data} />;
    case FilterKey.SPEED_LOCATION:
      return <SpeedIndexByLocation data={data} />;
    default:
      return (
        <div className="h-40 color-red">{t('Widget not supported')}</div>

@@ -1,52 +1,80 @@
import React, { useEffect } from 'react';
import React, { useEffect, useState } from 'react';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import ReCAPTCHA from 'react-google-recaptcha';
import { Form, Input, Loader, Icon, Message } from 'UI';
import { Button } from 'antd';
import { validatePassword } from 'App/validate';
import { PASSWORD_POLICY } from 'App/constants';
import stl from './forgotPassword.module.css';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

const recaptchaRef = React.createRef();
const ERROR_DONT_MATCH = (t) => t("Passwords don't match.");
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;

interface Props {
  params: any;
}
function CreatePassword(props: Props) {

function CreatePassword(props: Props & WithCaptchaProps) {
  const { t } = useTranslation();
  const { params } = props;
  const { userStore } = useStore();
  const { loading } = userStore;
  const { resetPassword } = userStore;
  const [error, setError] = React.useState<string | null>(null);
  const [validationError, setValidationError] = React.useState<string | null>(
    null,
  );
  const [updated, setUpdated] = React.useState(false);
  const [passwordRepeat, setPasswordRepeat] = React.useState('');
  const [password, setPassword] = React.useState('');
  const [error, setError] = useState<string | null>(null);
  const [validationError, setValidationError] = useState<string | null>(null);
  const [updated, setUpdated] = useState(false);
  const [passwordRepeat, setPasswordRepeat] = useState('');
  const [password, setPassword] = useState('');

  const pass = params.get('pass');
  const invitation = params.get('invitation');

  const handleSubmit = () => {
  const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;

  const handleSubmit = (token?: string) => {
    if (!validatePassword(password)) {
      return;
    }
    void resetPassword({ invitation, pass, password });

    resetPassword({
      invitation,
      pass,
      password,
      'g-recaptcha-response': token
    })
      .then(() => {
        setUpdated(true);
      })
      .catch((err) => {
        setError(err.message);
        // Reset captcha for the next attempt
        resetCaptcha();
      });
  };

  const onSubmit = (e: any) => {
    e.preventDefault();
    if (CAPTCHA_ENABLED && recaptchaRef.current) {
      recaptchaRef.current.execute();
    } else if (!CAPTCHA_ENABLED) {
      handleSubmit();
  const onSubmit = () => {
    // Validate before attempting captcha verification
    if (!validatePassword(password) || password !== passwordRepeat) {
      setValidationError(
        password !== passwordRepeat
          ? ERROR_DONT_MATCH(t)
          : PASSWORD_POLICY(t)
      );
      return;
    }

    // Reset any previous errors
    setError(null);
    setValidationError(null);

    submitWithCaptcha({ pass, invitation, password })
      .then((data) => {
        handleSubmit(data['g-recaptcha-response']);
      })
      .catch((error) => {
        console.error('Captcha verification failed:', error);
        // The component will handle showing appropriate messages
      });
  };

  const write = (e: any) => {

@@ -63,7 +91,7 @@ function CreatePassword(props: Props) {
    } else {
      setValidationError(null);
    }
  }, [passwordRepeat, password]);
  }, [passwordRepeat, password, t]);

  return (
    <Form

@@ -73,19 +101,8 @@ function CreatePassword(props: Props) {
    >
      {!error && (
        <>
          <Loader loading={loading}>
          <Loader loading={loading || isVerifyingCaptcha}>
            <div data-hidden={updated} className="w-full">
              {CAPTCHA_ENABLED && (
                <div className={stl.recaptcha}>
                  <ReCAPTCHA
                    ref={recaptchaRef}
                    size="invisible"
                    sitekey={CAPTCHA_SITE_KEY}
                    onChange={(token: any) => handleSubmit(token)}
                  />
                </div>
              )}

              <Form.Field>
                <label>{t('New password')}</label>
                <Input

@@ -132,10 +149,15 @@ function CreatePassword(props: Props) {
              <Button
                htmlType="submit"
                type="primary"
                loading={loading}
                loading={loading || isVerifyingCaptcha}
                disabled={loading || isVerifyingCaptcha || validationError !== null}
                className="w-full mt-4"
              >
                {t('Create')}
                {isVerifyingCaptcha
                  ? t('Verifying...')
                  : loading
                    ? t('Processing...')
                    : t('Create')}
              </Button>
            )}
          </>

@@ -153,4 +175,4 @@ function CreatePassword(props: Props) {
  );
}

export default observer(CreatePassword);
export default withCaptcha(observer(CreatePassword));

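CreatePassword above, and ResetPasswordRequest and Login below, all replace their hand-rolled invisible-ReCAPTCHA wiring with the shared withCaptcha HOC from 'App/withRecaptcha'. That module's implementation is not shown in this diff; the sketch below is only a plausible shape inferred from how it is used — submitWithCaptcha resolves the submitted values plus a 'g-recaptcha-response' token, isVerifyingCaptcha drives the loading states, and resetCaptcha rearms the widget after a failed attempt:

// Hypothetical sketch of 'App/withRecaptcha' — inferred from usage, not the actual source.
import React from 'react';
import ReCAPTCHA from 'react-google-recaptcha';

export interface WithCaptchaProps {
  submitWithCaptcha: (values: Record<string, any>) => Promise<Record<string, any>>;
  isVerifyingCaptcha: boolean;
  resetCaptcha: () => void;
}

const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';

export default function withCaptcha<P extends object>(
  Wrapped: React.ComponentType<P & WithCaptchaProps>,
) {
  return function WithCaptcha(props: P) {
    const ref = React.useRef<ReCAPTCHA>(null);
    const [verifying, setVerifying] = React.useState(false);

    // Resolve the caller's values, augmented with a fresh token when captcha is on.
    const submitWithCaptcha = async (values: Record<string, any>) => {
      if (!CAPTCHA_ENABLED || !ref.current) return values;
      setVerifying(true);
      try {
        const token = await ref.current.executeAsync(); // run the invisible challenge
        return { ...values, 'g-recaptcha-response': token };
      } finally {
        setVerifying(false);
      }
    };

    const resetCaptcha = () => ref.current?.reset();

    return (
      <>
        {CAPTCHA_ENABLED && (
          <ReCAPTCHA ref={ref} size="invisible" sitekey={window.env.CAPTCHA_SITE_KEY} />
        )}
        <Wrapped
          {...props}
          submitWithCaptcha={submitWithCaptcha}
          isVerifyingCaptcha={verifying}
          resetCaptcha={resetCaptcha}
        />
      </>
    );
  };
}
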
@@ -1,24 +1,26 @@
import React from 'react';
import React, { useState } from 'react';
import { Loader, Icon } from 'UI';
import ReCAPTCHA from 'react-google-recaptcha';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import { Form, Input, Button, Typography } from 'antd';
import { SquareArrowOutUpRight } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';

function ResetPasswordRequest() {
interface Props {
}

function ResetPasswordRequest(props: Props & WithCaptchaProps) {
  const { t } = useTranslation();
  const { userStore } = useStore();
  const { loading } = userStore;
  const { requestResetPassword } = userStore;
  const recaptchaRef = React.createRef();
  const [requested, setRequested] = React.useState(false);
  const [email, setEmail] = React.useState('');
  const [error, setError] = React.useState(null);
  const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
  const { CAPTCHA_SITE_KEY } = window.env;
  const [smtpError, setSmtpError] = React.useState<boolean>(false);
  const [requested, setRequested] = useState(false);
  const [email, setEmail] = useState('');
  const [error, setError] = useState(null);
  const [smtpError, setSmtpError] = useState<boolean>(false);

  const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;

  const write = (e: any) => {
    const { name, value } = e.target;

@@ -26,22 +28,21 @@ function ResetPasswordRequest() {
  };

  const onSubmit = () => {
    // e.preventDefault();
    if (CAPTCHA_ENABLED && recaptchaRef.current) {
      recaptchaRef.current.execute();
    } else if (!CAPTCHA_ENABLED) {
      handleSubmit();
    // Validation check
    if (!email || email.trim() === '') {
      return;
    }

    submitWithCaptcha({ email: email.trim() })
      .then((data) => {
        handleSubmit(data['g-recaptcha-response']);
      })
      .catch((error: any) => {
        console.error('Captcha verification failed:', error);
      });
  };

  const handleSubmit = (token?: any) => {
    if (
      CAPTCHA_ENABLED &&
      recaptchaRef.current &&
      (token === null || token === undefined)
    )
      return;

  const handleSubmit = (token?: string) => {
    setError(null);
    requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token })
      .catch((err: any) => {

@@ -50,29 +51,21 @@ function ResetPasswordRequest() {
        }

        setError(err.message);
        // Reset captcha for the next attempt
        resetCaptcha();
      })
      .finally(() => {
        setRequested(true);
      });
  };

  return (
    <Form
      onFinish={onSubmit}
      style={{ minWidth: '50%' }}
      className="flex flex-col"
    >
      <Loader loading={false}>
        {CAPTCHA_ENABLED && (
          <div className="flex justify-center">
            <ReCAPTCHA
              ref={recaptchaRef}
              size="invisible"
              data-hidden={requested}
              sitekey={CAPTCHA_SITE_KEY}
              onChange={(token: any) => handleSubmit(token)}
            />
          </div>
        )}
      <Loader loading={loading || isVerifyingCaptcha}>
        {!requested && (
          <>
            <Form.Item>

@@ -92,10 +85,14 @@ function ResetPasswordRequest() {
            <Button
              type="primary"
              htmlType="submit"
              loading={loading}
              disabled={loading}
              loading={loading || isVerifyingCaptcha}
              disabled={loading || isVerifyingCaptcha}
            >
              {t('Email Password Reset Link')}
              {isVerifyingCaptcha
                ? t('Verifying...')
                : loading
                  ? t('Processing...')
                  : t('Email Password Reset Link')}
            </Button>
          </>
        )}

@@ -146,4 +143,4 @@ function ResetPasswordRequest() {
  );
}

export default observer(ResetPasswordRequest);
export default withCaptcha(observer(ResetPasswordRequest));

@@ -1,9 +1,7 @@
import { Button, Dropdown, MenuProps, Space, Typography } from 'antd';
import React, { useCallback, useState } from 'react';
import { Button, Dropdown, MenuProps, Typography } from 'antd';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { CaretDownOutlined } from '@ant-design/icons';
import { Languages } from 'lucide-react';
import { Icon } from '../ui';
import { ChevronDown } from 'lucide-react';

const langs = [
  { code: 'en', label: 'English' },

@@ -12,14 +10,25 @@ const langs = [
  { code: 'ru', label: 'Русский' },
  { code: 'zh', label: '中國人' },
];
const langLabels = {
  en: 'English',
  fr: 'Français',
  es: 'Español',
  ru: 'Русский',
  zh: '中國人',
}

function LanguageSwitcher() {
  const { i18n } = useTranslation();
  const [selected, setSelected] = React.useState(i18n.language);

  const handleChangeLanguage = useCallback((lang: string) => {
    i18n.changeLanguage(lang);
    localStorage.setItem('i18nextLng', lang);
  }, []);
  const onChange = (val: string) => {
    setSelected(val)
  }
  const handleChangeLanguage = () => {
    void i18n.changeLanguage(selected)
    localStorage.setItem('i18nextLng', selected)
  }

  const menuItems: MenuProps['items'] = langs.map((lang) => ({
    key: lang.code,

@@ -31,21 +40,31 @@ function LanguageSwitcher() {
  }));

  return (
    <Dropdown
      menu={{
        items: menuItems,
        selectable: true,
        defaultSelectedKeys: [i18n.language],
        style: {
          maxHeight: 500,
          overflowY: 'auto',
        },
        onClick: (e) => handleChangeLanguage(e.key),
      }}
      placement="bottomLeft"
    >
      <Button icon={<Languages size={12} />} />
    </Dropdown>
    <div className={'flex flex-col gap-2 align-start'}>
      <div className={'font-semibold'}>{i18n.t('Language')}</div>
      <Dropdown
        menu={{
          items: menuItems,
          selectable: true,
          defaultSelectedKeys: [i18n.language],
          style: {
            maxHeight: 500,
            overflowY: 'auto',
          },
          onClick: (e) => onChange(e.key),
        }}
      >
        <Button>
          <div className={'flex justify-between items-center gap-8'}>
            <span>{langLabels[selected]}</span>
            <ChevronDown size={14} />
          </div>
        </Button>
      </Dropdown>
      <Button className={'w-fit'} onClick={handleChangeLanguage}>
        {i18n.t('Update')}
      </Button>
    </div>
  );
}

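The reworked switcher persists the choice under the 'i18nextLng' localStorage key. That happens to be the default key the i18next browser language detector looks up, so a bootstrap along the following lines (an assumed setup, not part of this diff) would re-apply the saved language on the next page load:

// Hypothetical i18n bootstrap — not from this diff. LanguageDetector reads
// localStorage['i18nextLng'] by default, the same key LanguageSwitcher writes.
import i18n from 'i18next';
import LanguageDetector from 'i18next-browser-languagedetector';
import { initReactI18next } from 'react-i18next';

i18n
  .use(LanguageDetector)
  .use(initReactI18next)
  .init({
    fallbackLng: 'en',
    supportedLngs: ['en', 'fr', 'es', 'ru', 'zh'],
    detection: { order: ['localStorage', 'navigator'] },
  });
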
@@ -1,23 +1,18 @@
import withPageTitle from 'HOCs/withPageTitle';
import cn from 'classnames';
import React, { useEffect, useMemo, useRef, useState } from 'react';
// Consider using a different approach for titles in functional components
import ReCAPTCHA from 'react-google-recaptcha';
import React, { useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
import { toast } from 'react-toastify';

import { ENTERPRISE_REQUEIRED } from 'App/constants';
import { forgotPassword, signup } from 'App/routes';
import { Icon, Link, Loader, Tooltip } from 'UI';
import { Icon, Link, Loader } from 'UI';
import { Button, Form, Input } from 'antd';

import Copyright from 'Shared/Copyright';

import stl from './login.module.css';
import { useTranslation } from 'react-i18next';
import { useStore } from 'App/mstore';
import LanguageSwitcher from '../LanguageSwitcher';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
import SSOLogin from './SSOLogin';

const FORGOT_PASSWORD = forgotPassword();
const SIGNUP_ROUTE = signup();

@@ -26,14 +21,15 @@ interface LoginProps {
  location: Location;
}

const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';

function Login({ location }: LoginProps) {
function Login({
  location,
  submitWithCaptcha,
  isVerifyingCaptcha,
  resetCaptcha,
}: LoginProps & WithCaptchaProps) {
  const { t } = useTranslation();
  const [email, setEmail] = useState('');
  const [password, setPassword] = useState('');
  // const CAPTCHA_ENABLED = useMemo(() => window.env.CAPTCHA_ENABLED === 'true', []);
  const recaptchaRef = useRef<ReCAPTCHA>(null);
  const { loginStore, userStore } = useStore();
  const { errors } = userStore.loginRequest;
  const { loading } = loginStore;

@@ -49,7 +45,6 @@ function Login({ location }: LoginProps) {
  }, [authDetails]);

  useEffect(() => {
    // void fetchTenants();
    const jwt = params.get('jwt');
    const spotJwt = params.get('spotJwt');
    if (spotJwt) {

@@ -71,7 +66,8 @@ function Login({ location }: LoginProps) {
      if (event.data.type === 'orspot:logged') {
        clearInterval(int);
        window.removeEventListener('message', onSpotMsg);
        toast.success(t('You have been logged into Spot successfully'));
        const msg = t('You have been logged into Spot successfully')
        toast.success(msg);
      }
    };
    window.addEventListener('message', onSpotMsg);

@@ -108,32 +104,36 @@ function Login({ location }: LoginProps) {
        if (resp) {
          userStore.syntheticLogin(resp);
          setJwt({ jwt: resp.jwt, spotJwt: resp.spotJwt ?? null });
          handleSpotLogin(resp.spotJwt);
          if (resp.spotJwt) {
            handleSpotLogin(resp.spotJwt);
          }
        }
      })
      .catch((e) => {
        userStore.syntheticLoginError(e);
        resetCaptcha();
      });
  };

  const onSubmit = () => {
    if (CAPTCHA_ENABLED && recaptchaRef.current) {
      recaptchaRef.current.execute();
    } else if (!CAPTCHA_ENABLED) {
      handleSubmit();
    if (!email || !password) {
      return;
    }
  };

  const ssoLink =
    window !== window.top
      ? `${window.location.origin}/api/sso/saml2?iFrame=true`
      : `${window.location.origin}/api/sso/saml2`;
    submitWithCaptcha({ email: email.trim(), password })
      .then((data) => {
        handleSubmit(data['g-recaptcha-response']);
      })
      .catch((error: any) => {
        console.error('Captcha error:', error);
      });
  };

  return (
    <div className="flex items-center justify-center h-screen">
      <div className="flex flex-col items-center">
        <div className="m-10 ">
          <img src="/assets/logo.svg" width={200} />
          <img src="/assets/logo.svg" width={200} alt="Company Logo" />
        </div>
        <div className="border rounded-lg bg-white shadow-sm">
          <h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">

@@ -145,15 +145,7 @@ function Login({ location }: LoginProps) {
            className={cn('flex items-center justify-center flex-col')}
            style={{ width: '350px' }}
          >
            <Loader loading={loading}>
              {CAPTCHA_ENABLED && (
                <ReCAPTCHA
                  ref={recaptchaRef}
                  size="invisible"
                  sitekey={window.env.CAPTCHA_SITE_KEY}
                  onChange={(token) => handleSubmit(token)}
                />
              )}
            <Loader loading={loading || isVerifyingCaptcha}>
              <div style={{ width: '350px' }} className="px-8">
                <Form.Item>
                  <label>{t('Email Address')}</label>

@@ -186,8 +178,8 @@ function Login({ location }: LoginProps) {
            </Loader>
            {errors && errors.length ? (
              <div className="px-8 my-2 w-full">
                {errors.map((error) => (
                  <div className="flex items-center bg-red-lightest rounded p-3">
                {errors.map((error, index) => (
                  <div key={index} className="flex items-center bg-red-lightest rounded p-3">
                    <Icon name="info" color="red" size="20" />
                    <span className="color-red ml-2">
                      {error}

@@ -204,8 +196,14 @@ function Login({ location }: LoginProps) {
              className="mt-2 w-full text-center rounded-lg"
              type="primary"
              htmlType="submit"
              loading={loading || isVerifyingCaptcha}
              disabled={loading || isVerifyingCaptcha}
            >
              {t('Login')}
              {isVerifyingCaptcha
                ? t('Verifying...')
                : loading
                  ? t('Logging in...')
                  : t('Login')}
            </Button>

            <div className="my-8 flex justify-center items-center flex-wrap">

@@ -219,63 +217,12 @@ function Login({ location }: LoginProps) {
            </div>
          </Form>

          <div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
            {authDetails.sso ? (
              <a href={ssoLink} rel="noopener noreferrer">
                <Button type="text" htmlType="submit">
                  {`${t('Login with SSO')} ${
                    authDetails.ssoProvider
                      ? `(${authDetails.ssoProvider})`
                      : ''
                  }`}
                </Button>
              </a>
            ) : (
              <Tooltip
                delay={0}
                title={
                  <div className="text-center">
                    {authDetails.edition === 'ee' ? (
                      <span>
                        {t('SSO has not been configured.')}
                        <br />
                        {t('Please reach out to your admin.')}
                      </span>
                    ) : (
                      ENTERPRISE_REQUEIRED(t)
                    )}
                  </div>
                }
                placement="top"
              >
                <Button
                  type="text"
                  htmlType="submit"
                  className="pointer-events-none opacity-30"
                >
                  {`${t('Login with SSO')} ${
                    authDetails.ssoProvider
                      ? `(${authDetails.ssoProvider})`
                      : ''
                  }`}
                </Button>
              </Tooltip>
            )}
          </div>
        </div>
        <div
          className={cn('flex items-center w-96 justify-center my-8', {
            '!hidden': !authDetails?.enforceSSO,
          })}
        >
          <a href={ssoLink} rel="noopener noreferrer">
            <Button type="primary">
              {`${t('Login with SSO')} ${
                authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
              }`}
            </Button>
          </a>
          <SSOLogin authDetails={authDetails} />
        </div>

        {authDetails?.enforceSSO && (
          <SSOLogin authDetails={authDetails} enforceSSO={true} />
        )}
      </div>
    </div>

@@ -287,4 +234,6 @@ function Login({ location }: LoginProps) {
  );
}

export default withPageTitle('Login - OpenReplay')(observer(Login));
export default withPageTitle('Login - OpenReplay')(
  withCaptcha(observer(Login))
);

frontend/app/components/Login/SSOLogin.tsx (new file, 78 lines)
@@ -0,0 +1,78 @@
import React from 'react';
import cn from 'classnames';
import { Button, Tooltip } from 'antd';
import { useTranslation } from 'react-i18next';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import stl from './login.module.css';
import { useStore } from 'App/mstore';

interface SSOLoginProps {
  authDetails: any;
  enforceSSO?: boolean;
}

const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
  const { userStore } = useStore();
  const { t } = useTranslation();
  const { isSSOSupported } = userStore;

  const getSSOLink = () =>
    window !== window.top
      ? `${window.location.origin}/api/sso/saml2?iFrame=true`
      : `${window.location.origin}/api/sso/saml2`;

  const ssoLink = getSSOLink();
  const ssoButtonText = `${t('Login with SSO')} ${
    authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
  }`;

  if (enforceSSO) {
    return (
      <div className={cn('flex items-center w-96 justify-center my-8')}>
        <a href={ssoLink} rel="noopener noreferrer">
          <Button type="primary">{ssoButtonText}</Button>
        </a>
      </div>
    );
  }

  return (
    <div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
      {authDetails.sso ? (
        <a href={ssoLink} rel="noopener noreferrer">
          <Button type="text" htmlType="submit">
            {ssoButtonText}
          </Button>
        </a>
      ) : (
        <Tooltip
          title={
            <div className="text-center">
              {isSSOSupported ? (
                <span>
                  {t('SSO has not been configured.')}
                  <br />
                  {t('Please reach out to your admin.')}
                </span>
              ) : (
                ENTERPRISE_REQUEIRED(t)
              )}
            </div>
          }
          placement="top"
        >
          <span className="cursor-not-allowed">
            <Button type="text" htmlType="submit" disabled={true}>
              {ssoButtonText}
            </Button>
          </span>
        </Tooltip>
      )}
    </div>
  );
};

export default SSOLogin;

Some files were not shown because too many files have changed in this diff.