Merge branch 'dev' of https://github.com/openreplay/openreplay into add_incident_event

This commit is contained in:
Андрей Бабушкин 2025-05-26 17:18:22 +02:00
commit e880c27e54
162 changed files with 5230 additions and 2456 deletions

33
.github/workflows/frontend-tests.yaml vendored Normal file
View file

@ -0,0 +1,33 @@
# Runs the frontend unit-test suite on pull requests touching the frontend
# (or this workflow itself) and uploads coverage results to Codecov.
name: Frontend tests
on:
  pull_request:
    paths:
      - 'frontend/**'
      # Fixed: this file is named frontend-tests.yaml; the previous path
      # ('.github/workflows/frontend-test.yaml') never matched, so changes
      # to the workflow itself did not trigger a test run.
      - '.github/workflows/frontend-tests.yaml'
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        # v4 for consistency with the other workflows updated in this change
        # set (checkout@v2 runs on a deprecated Node.js runtime).
        uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20
      - name: Install dependencies
        working-directory: frontend
        run: yarn
      - name: Run tests
        working-directory: frontend
        run: yarn test:ci
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          directory: frontend/coverage/

View file

@ -8,7 +8,11 @@ on:
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to build patches from.'
description: 'Tag to update.'
required: true
type: string
branch:
description: 'Branch to build patches from. Make sure the branch is up to date with the tag; otherwise commits will be missing.'
required: true
type: string
@ -73,7 +77,7 @@ jobs:
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=patch/main/${HEAD_COMMIT_ID}" >> $GITHUB_ENV
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
- name: Build
id: build-image

View file

@ -20,12 +20,20 @@ jobs:
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 1
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Rebase with main branch, to make sure the code has latest main changes
if: github.ref != 'refs/heads/main'
run: |
git pull --rebase origin main
git remote -v
git config --global user.email "action@github.com"
git config --global user.name "GitHub Action"
git config --global rebase.autoStash true
git fetch origin main:main
git rebase main
git log -3
- name: Downloading yq
run: |
@ -48,6 +56,8 @@ jobs:
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
@ -100,7 +110,7 @@ jobs:
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $BUILD_SCRIPT_NAME >> /tmp/managed_${service}.txt 2>&1 || { echo "Build failed for $service"; cat /tmp/managed_${service}.txt; exit 1; }
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt

View file

@ -4,25 +4,24 @@ verify_ssl = true
name = "pypi"
[packages]
urllib3 = "==2.3.0"
urllib3 = "==2.4.0"
requests = "==2.32.3"
boto3 = "==1.37.21"
boto3 = "==1.38.16"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.2"
psycopg = {extras = ["binary", "pool"], version = "==3.2.9"}
clickhouse-connect = "==0.8.17"
elasticsearch = "==9.0.1"
jira = "==3.8.0"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
uvicorn = {extras = ["standard"], version = "==0.34.2"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
pydantic = {extras = ["email"], version = "==2.11.4"}
apscheduler = "==3.11.0"
redis = "==5.2.1"
redis = "==6.1.0"
[dev-packages]
[requires]
python_version = "3.12"
python_full_version = "3.12.8"

View file

@ -1,10 +1,9 @@
import logging
import schemas
from chalicelib.core import countries, events, metadata
from chalicelib.core import countries, metadata
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.event_filter_definition import Event
from chalicelib.utils.or_cache import CachedResponse
logger = logging.getLogger(__name__)
TABLE = "public.autocomplete"
@ -113,10 +112,10 @@ def __generic_query(typename, value_length=None):
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def __generic_autocomplete(event: str):
def f(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
query = __generic_query(event.ui_type, value_length=len(value))
query = __generic_query(event, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
cur.execute(cur.mogrify(query, params))
@ -149,8 +148,8 @@ def __errors_query(source=None, value_length=None):
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
@ -161,8 +160,8 @@ def __errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
@ -173,8 +172,8 @@ def __errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(value)s
@ -185,8 +184,8 @@ def __errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(value)s
@ -196,8 +195,8 @@ def __errors_query(source=None, value_length=None):
return f"""((SELECT DISTINCT ON(lg.message)
lg.message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.message ILIKE %(svalue)s
@ -208,8 +207,8 @@ def __errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
FROM {events.EventType.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR}' AS type
FROM events.errors INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.name ILIKE %(svalue)s
@ -234,8 +233,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
if len(value) > 2:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -244,8 +243,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -254,8 +253,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -264,8 +263,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -274,8 +273,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
else:
query = f"""(SELECT DISTINCT ON(lg.reason)
lg.reason AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -284,8 +283,8 @@ def __search_errors_mobile(project_id, value, key=None, source=None):
UNION ALL
(SELECT DISTINCT ON(lg.name)
lg.name AS value,
'{events.EventType.CRASH_MOBILE.ui_type}' AS type
FROM {events.EventType.CRASH_MOBILE.table} INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
'{schemas.EventType.ERROR_MOBILE}' AS type
FROM events_common.crashes INNER JOIN public.crashes_ios AS lg USING (crash_ios_id) LEFT JOIN public.sessions AS s USING(session_id)
WHERE
s.project_id = %(project_id)s
AND lg.project_id = %(project_id)s
@ -377,7 +376,6 @@ def is_top_supported(event_type):
return TYPE_TO_COLUMN.get(event_type, False)
@CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)
def get_top_values(project_id, event_type, event_key=None):
with pg_client.PostgresClient() as cur:
if schemas.FilterType.has_value(event_type):

View file

@ -1,10 +1,9 @@
import logging
import schemas
from chalicelib.core import countries, events, metadata
from chalicelib.core import countries, metadata
from chalicelib.utils import ch_client
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.event_filter_definition import Event
from chalicelib.utils.or_cache import CachedResponse
logger = logging.getLogger(__name__)
TABLE = "experimental.autocomplete"
@ -114,7 +113,7 @@ def __generic_query(typename, value_length=None):
LIMIT 10;"""
def __generic_autocomplete(event: Event):
def __generic_autocomplete(event: str):
def f(project_id, value, key=None, source=None):
with ch_client.ClickHouseClient() as cur:
query = __generic_query(event.ui_type, value_length=len(value))
@ -150,7 +149,7 @@ def __pg_errors_query(source=None, value_length=None):
return f"""((SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -162,7 +161,7 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -173,7 +172,7 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -184,7 +183,7 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -194,7 +193,7 @@ def __pg_errors_query(source=None, value_length=None):
return f"""((SELECT DISTINCT ON(message)
message AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -205,7 +204,7 @@ def __pg_errors_query(source=None, value_length=None):
(SELECT DISTINCT ON(name)
name AS value,
source,
'{events.EventType.ERROR.ui_type}' AS type
'{schemas.EventType.ERROR}' AS type
FROM {MAIN_TABLE}
WHERE
project_id = %(project_id)s
@ -260,8 +259,9 @@ def __search_metadata(project_id, value, key=None, source=None):
with ch_client.ClickHouseClient() as cur:
query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
LIMIT 5;""",
parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})
results = cur.execute(query)
return helper.list_to_camel_case(results)
@ -298,7 +298,6 @@ def is_top_supported(event_type):
return TYPE_TO_COLUMN.get(event_type, False)
@CachedResponse(table="or_cache.autocomplete_top_values", ttl=5 * 60)
def get_top_values(project_id, event_type, event_key=None):
with ch_client.ClickHouseClient() as cur:
if schemas.FilterType.has_value(event_type):

View file

@ -1,226 +0,0 @@
from functools import cache
from typing import Optional
import schemas
from chalicelib.core import issues
from chalicelib.core.autocomplete import autocomplete
from chalicelib.core.sessions import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
def get_customs_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CUSTOM' AS type
FROM events_common.customs AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows = cur.fetchall()
return helper.dict_to_camel_case(rows)
def __merge_cells(rows, start, count, replacement):
rows[start] = replacement
rows = rows[:start + 1] + rows[start + count:]
return rows
def __get_grouped_clickrage(rows, session_id, project_id):
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
if len(click_rage_issues) == 0:
return rows
for c in click_rage_issues:
merge_count = c.get("payload")
if merge_count is not None:
merge_count = merge_count.get("Count", 3)
else:
merge_count = 3
for i in range(len(rows)):
if rows[i]["timestamp"] == c["timestamp"]:
rows = __merge_cells(rows=rows,
start=i,
count=merge_count,
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
break
return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
rows = []
if event_type is None or event_type == schemas.EventType.CLICK:
cur.execute(cur.mogrify("""\
SELECT
c.*,
'CLICK' AS type
FROM events.clicks AS c
WHERE
c.session_id = %(session_id)s
ORDER BY c.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.INPUT:
cur.execute(cur.mogrify("""
SELECT
i.*,
'INPUT' AS type
FROM events.inputs AS i
WHERE
i.session_id = %(session_id)s
ORDER BY i.timestamp;""",
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.LOCATION:
cur.execute(cur.mogrify("""\
SELECT
l.*,
l.path AS value,
l.path AS url,
'LOCATION' AS type
FROM events.pages AS l
WHERE
l.session_id = %(session_id)s
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
rows += cur.fetchall()
rows = helper.list_to_camel_case(rows)
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
return rows
def _search_tags(project_id, value, key=None, source=None):
with pg_client.PostgresClient() as cur:
query = f"""
SELECT public.tags.name
'TAG' AS type
FROM public.tags
WHERE public.tags.project_id = %(project_id)s
ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
LIMIT 10
"""
query = cur.mogrify(query, {'project_id': project_id, 'value': value})
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
return results
class EventType:
CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
column=None) # column=None because errors are searched by name or message
METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
# MOBILE
CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
@cache
def supported_types():
return {
EventType.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK),
query=autocomplete.__generic_query(typename=EventType.CLICK.ui_type)),
EventType.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT),
query=autocomplete.__generic_query(typename=EventType.INPUT.ui_type)),
EventType.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.LOCATION),
query=autocomplete.__generic_query(
typename=EventType.LOCATION.ui_type)),
EventType.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CUSTOM),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM.ui_type)),
EventType.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST),
query=autocomplete.__generic_query(
typename=EventType.REQUEST.ui_type)),
EventType.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.GRAPHQL),
query=autocomplete.__generic_query(
typename=EventType.GRAPHQL.ui_type)),
EventType.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.STATEACTION),
query=autocomplete.__generic_query(
typename=EventType.STATEACTION.ui_type)),
EventType.TAG.ui_type: SupportedFilter(get=_search_tags, query=None),
EventType.ERROR.ui_type: SupportedFilter(get=autocomplete.__search_errors,
query=None),
EventType.METADATA.ui_type: SupportedFilter(get=autocomplete.__search_metadata,
query=None),
# MOBILE
EventType.CLICK_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.CLICK_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.CLICK_MOBILE.ui_type)),
EventType.SWIPE_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.SWIPE_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.SWIPE_MOBILE.ui_type)),
EventType.INPUT_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.INPUT_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.INPUT_MOBILE.ui_type)),
EventType.VIEW_MOBILE.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.VIEW_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.VIEW_MOBILE.ui_type)),
EventType.CUSTOM_MOBILE.ui_type: SupportedFilter(
get=autocomplete.__generic_autocomplete(EventType.CUSTOM_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.CUSTOM_MOBILE.ui_type)),
EventType.REQUEST_MOBILE.ui_type: SupportedFilter(
get=autocomplete.__generic_autocomplete(EventType.REQUEST_MOBILE),
query=autocomplete.__generic_query(
typename=EventType.REQUEST_MOBILE.ui_type)),
EventType.CRASH_MOBILE.ui_type: SupportedFilter(get=autocomplete.__search_errors_mobile,
query=None),
}
def get_errors_by_session_id(session_id, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
FROM {EventType.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
errors = cur.fetchall()
for e in errors:
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
return helper.list_to_camel_case(errors)
def search(text, event_type, project_id, source, key):
if not event_type:
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
if event_type in supported_types().keys():
rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
elif event_type + "_MOBILE" in supported_types().keys():
rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
elif event_type in sessions_metas.supported_types().keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_IOS") \
and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.endswith("_MOBILE") \
and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
return sessions_metas.search(text, event_type, project_id)
else:
return {"errors": ["unsupported event"]}
return {"data": rows}

View file

@ -0,0 +1,11 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

# Feature flag: when EXP_EVENTS is truthy, expose the experimental
# ClickHouse-backed events implementation under the package-level name
# `events`; otherwise fall back to the PostgreSQL implementation.
# Both modules are expected to provide the same public API, so callers
# can simply `from ... import events` without caring about the backend.
if config("EXP_EVENTS", cast=bool, default=False):
    logger.info(">>> Using experimental events replay")
    from . import events_ch as events
else:
    from . import events_pg as events

View file

@ -0,0 +1,97 @@
from chalicelib.utils import ch_client
from .events_pg import *
def __explode_properties(rows):
for i in range(len(rows)):
rows[i] = {**rows[i], **rows[i]["$properties"]}
rows[i].pop("$properties")
return rows
def get_customs_by_session_id(session_id, project_id):
    """Fetch the manually-captured custom events of a session (ClickHouse).

    Excludes INCIDENT events (served separately by
    get_incidents_by_session_id) and auto-captured events. Each row's
    `$properties` mapping is flattened into the top level and keys are
    camel-cased.
    """
    with ch_client.ClickHouseClient() as cur:
        # Bind parameters via cur.format(...) before execute, matching the
        # other queries in this module (get_by_session_id,
        # get_incidents_by_session_id); the original passed the raw query
        # and params straight to execute, inconsistent with its siblings.
        query = cur.format(query="""\
                SELECT `$properties`,
                       created_at,
                       'CUSTOM' AS type
                FROM product_analytics.events
                WHERE session_id = %(session_id)s
                  AND NOT `$auto_captured`
                  AND `$event_name`!='INCIDENT'
                ORDER BY created_at;""",
                           parameters={"project_id": project_id, "session_id": session_id})
        # NOTE(review): project_id is bound but not used in the WHERE clause —
        # confirm whether project filtering is required (siblings share this
        # pattern, relying on session_id alone).
        rows = cur.execute(query)
        rows = __explode_properties(rows)
        return helper.list_to_camel_case(rows)
def __merge_cells(rows, start, count, replacement):
rows[start] = replacement
rows = rows[:start + 1] + rows[start + count:]
return rows
def __get_grouped_clickrage(rows, session_id, project_id):
    """Collapse runs of click rows that belong to a click-rage issue.

    For each click_rage issue of the session, the first row with a matching
    timestamp is replaced by a single synthetic CLICKRAGE row carrying the
    number of merged clicks, and the following (count - 1) rows are dropped.
    Returns `rows` unchanged when the session has no click-rage issues.
    """
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
    if len(click_rage_issues) == 0:
        return rows
    for c in click_rage_issues:
        # The issue payload may carry the number of merged clicks under
        # "Count"; default to 3 when absent.
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("Count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            # NOTE(review): rows are still snake_case ("created_at") at this
            # point while the issue dict uses camelCase ("createdAt"); the two
            # values may also use different time representations — confirm
            # they are directly comparable.
            if rows[i]["created_at"] == c["createdAt"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
    """Return the auto-captured replay events of a session (ClickHouse).

    By default fetches CLICK, INPUT and LOCATION events; when `event_type`
    is given, only that type is fetched. Optionally groups click-rage
    bursts into single CLICKRAGE rows. Rows are camel-cased and sorted by
    creation time.
    """
    with ch_client.ClickHouseClient() as cur:
        # Default to the three auto-captured UI event types; narrow to a
        # single-element tuple when the caller asks for one type.
        select_events = ('CLICK', 'INPUT', 'LOCATION')
        if event_type is not None:
            select_events = (event_type,)
        query = cur.format(query="""\
                SELECT created_at,
                       `$properties`,
                       `$event_name` AS type
                FROM product_analytics.events
                WHERE session_id = %(session_id)s
                  AND `$event_name` IN %(select_events)s
                  AND `$auto_captured`
                ORDER BY created_at;""",
                           parameters={"project_id": project_id, "session_id": session_id,
                                       "select_events": select_events})
        rows = cur.execute(query)
        rows = __explode_properties(rows)
        # NOTE(review): `'CLICK' in select_events` assumes schemas.EventType
        # compares equal to plain strings (str-valued enum) — confirm.
        if group_clickrage and 'CLICK' in select_events:
            rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
        rows = helper.list_to_camel_case(rows)
        # Re-sort after the camel-case transformation to keep the
        # chronological guarantee.
        rows = sorted(rows, key=lambda k: k["createdAt"])
        return rows
def get_incidents_by_session_id(session_id, project_id):
    """Fetch the INCIDENT events of a session (ClickHouse).

    Returns camel-cased rows ordered by creation time, with `$properties`
    flattened into the top level.
    """
    with ch_client.ClickHouseClient() as cur:
        # NOTE(review): this filters on `$auto_captured` while
        # get_customs_by_session_id excludes INCIDENT among
        # NOT-auto-captured rows — confirm incidents are indeed flagged as
        # auto-captured at ingestion.
        query = cur.format(query="""\
                SELECT created_at,
                       `$properties`,
                       `$event_name` AS type
                FROM product_analytics.events
                WHERE session_id = %(session_id)s
                  AND `$event_name` = 'INCIDENT'
                  AND `$auto_captured`
                ORDER BY created_at;""",
                           parameters={"project_id": project_id, "session_id": session_id})
        rows = cur.execute(query)
        rows = __explode_properties(rows)
        rows = helper.list_to_camel_case(rows)
        # Re-sort after the camel-case transformation to keep the
        # chronological guarantee.
        rows = sorted(rows, key=lambda k: k["createdAt"])
        return rows

View file

@ -1,5 +1,5 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import events
from . import events
def get_customs_by_session_id(session_id, project_id):
@ -58,7 +58,7 @@ def get_crashes_by_session_id(session_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""
SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
FROM {events.EventType.CRASH_MOBILE.table} AS cr
FROM events_common.crashes AS cr
INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
INNER JOIN public.sessions AS s USING (session_id)
WHERE

View file

@ -0,0 +1,209 @@
import logging
from functools import cache
from typing import Optional
import schemas
from chalicelib.core.autocomplete import autocomplete
from chalicelib.core.issues import issues
from chalicelib.core.sessions import sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter
logger = logging.getLogger(__name__)
def get_customs_by_session_id(session_id, project_id):
    """Fetch the CUSTOM events recorded for a session (PostgreSQL backend).

    Returns the rows of events_common.customs tagged with type='CUSTOM',
    ordered by timestamp and camel-cased.
    """
    with pg_client.PostgresClient() as cur:
        # NOTE(review): project_id is bound but unused in the WHERE clause —
        # confirm whether project filtering is required here.
        cur.execute(cur.mogrify(""" \
                SELECT c.*,
                       'CUSTOM' AS type
                FROM events_common.customs AS c
                WHERE c.session_id = %(session_id)s
                ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id})
                    )
        rows = cur.fetchall()
        return helper.list_to_camel_case(rows)
def __merge_cells(rows, start, count, replacement):
rows[start] = replacement
rows = rows[:start + 1] + rows[start + count:]
return rows
def __get_grouped_clickrage(rows, session_id, project_id):
    """Collapse runs of click rows belonging to a click-rage issue (PG).

    For each click_rage issue of the session, the first row whose timestamp
    matches the issue's is replaced by a single synthetic CLICKRAGE row
    carrying the merged-click count, and the following (count - 1) rows are
    dropped. Returns `rows` unchanged when no click-rage issues exist.
    """
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
    if len(click_rage_issues) == 0:
        return rows
    for c in click_rage_issues:
        # The issue payload may carry the number of merged clicks under
        # "Count"; default to 3 when absent.
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("Count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            # Rows are still snake_case here; timestamps are compared
            # directly against the issue's timestamp.
            if rows[i]["timestamp"] == c["timestamp"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
    """Collect a session's replay events (PostgreSQL backend).

    Fetches clicks, inputs and page locations (or only `event_type` when
    given), optionally groups click-rage bursts, then returns the combined
    rows camel-cased and sorted by (timestamp, messageId).
    """
    with pg_client.PostgresClient() as cur:
        rows = []
        if event_type is None or event_type == schemas.EventType.CLICK:
            cur.execute(cur.mogrify(""" \
                SELECT c.*,
                       'CLICK' AS type
                FROM events.clicks AS c
                WHERE c.session_id = %(session_id)s
                ORDER BY c.timestamp;""",
                                    {"project_id": project_id, "session_id": session_id})
                        )
            rows += cur.fetchall()
            # Only click rows can form a click-rage burst, so grouping is
            # done right after fetching clicks.
            if group_clickrage:
                rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
        if event_type is None or event_type == schemas.EventType.INPUT:
            cur.execute(cur.mogrify("""
                SELECT i.*,
                       'INPUT' AS type
                FROM events.inputs AS i
                WHERE i.session_id = %(session_id)s
                ORDER BY i.timestamp;""",
                                    {"project_id": project_id, "session_id": session_id})
                        )
            rows += cur.fetchall()
        if event_type is None or event_type == schemas.EventType.LOCATION:
            # `path` is exposed twice (value and url) for consumers that
            # expect either key.
            cur.execute(cur.mogrify(""" \
                SELECT l.*,
                       l.path AS value,
                       l.path AS url,
                       'LOCATION' AS type
                FROM events.pages AS l
                WHERE
                    l.session_id = %(session_id)s
                ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
            rows += cur.fetchall()
        rows = helper.list_to_camel_case(rows)
        # Merge the three per-type result sets into one chronological stream;
        # messageId breaks ties between events sharing a timestamp.
        rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
        return rows
def _search_tags(project_id, value, key=None, source=None):
    """Autocomplete helper: return up to 10 tag names for a project,
    ordered by trigram similarity to the typed value.

    `key` and `source` are accepted for signature compatibility with the
    other autocomplete getters but are unused here.
    """
    with pg_client.PostgresClient() as cur:
        # Fixed: the original SELECT list was missing the comma between the
        # two columns ("SELECT public.tags.name 'TAG' AS type"), which is
        # invalid SQL and would make every tag search fail.
        query = f"""
                SELECT public.tags.name,
                       'TAG' AS type
                FROM public.tags
                WHERE public.tags.project_id = %(project_id)s
                ORDER BY SIMILARITY(public.tags.name, %(value)s) DESC
                LIMIT 10
                """
        query = cur.mogrify(query, {'project_id': project_id, 'value': value})
        cur.execute(query)
        results = helper.list_to_camel_case(cur.fetchall())
        return results
@cache
def supported_types():
    """Registry mapping each searchable event type to its handlers.

    Each SupportedFilter bundles an autocomplete getter (`get`) and an
    optional generic query builder (`query`; None means autocomplete only).
    Cached with functools.cache, so the registry is built once per process.

    Fixed: the STATE_ACTION getter referenced schemas.EventType.STATEACTION,
    which does not match the member name used everywhere else in this block
    (schemas.EventType.STATE_ACTION).
    """
    return {
        schemas.EventType.CLICK: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK),
                                                 query=autocomplete.__generic_query(typename=schemas.EventType.CLICK)),
        schemas.EventType.INPUT: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT),
                                                 query=autocomplete.__generic_query(typename=schemas.EventType.INPUT)),
        schemas.EventType.LOCATION: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.LOCATION),
                                                    query=autocomplete.__generic_query(
                                                        typename=schemas.EventType.LOCATION)),
        schemas.EventType.CUSTOM: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM),
                                                  query=autocomplete.__generic_query(
                                                      typename=schemas.EventType.CUSTOM)),
        schemas.EventType.REQUEST: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST),
                                                   query=autocomplete.__generic_query(
                                                       typename=schemas.EventType.REQUEST)),
        schemas.EventType.GRAPHQL: SupportedFilter(get=autocomplete.__generic_autocomplete(schemas.EventType.GRAPHQL),
                                                   query=autocomplete.__generic_query(
                                                       typename=schemas.EventType.GRAPHQL)),
        schemas.EventType.STATE_ACTION: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.STATE_ACTION),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.STATE_ACTION)),
        schemas.EventType.TAG: SupportedFilter(get=_search_tags, query=None),
        schemas.EventType.ERROR: SupportedFilter(get=autocomplete.__search_errors,
                                                 query=None),
        schemas.FilterType.METADATA: SupportedFilter(get=autocomplete.__search_metadata,
                                                     query=None),
        # MOBILE
        schemas.EventType.CLICK_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.CLICK_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.CLICK_MOBILE)),
        schemas.EventType.SWIPE_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.SWIPE_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.SWIPE_MOBILE)),
        schemas.EventType.INPUT_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.INPUT_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.INPUT_MOBILE)),
        schemas.EventType.VIEW_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.VIEW_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.VIEW_MOBILE)),
        schemas.EventType.CUSTOM_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.CUSTOM_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.CUSTOM_MOBILE)),
        schemas.EventType.REQUEST_MOBILE: SupportedFilter(
            get=autocomplete.__generic_autocomplete(schemas.EventType.REQUEST_MOBILE),
            query=autocomplete.__generic_query(
                typename=schemas.EventType.REQUEST_MOBILE)),
        schemas.EventType.ERROR_MOBILE: SupportedFilter(get=autocomplete.__search_errors_mobile,
                                                        query=None),
    }
def get_errors_by_session_id(session_id, project_id):
    """Load a session's error events joined with their error definitions.

    `time` is the error's offset from the session start
    (er.timestamp - s.start_ts), i.e. its position within the replay.

    :return: camelCase rows ordered by timestamp, with stacktrace_parsed_at
        converted from datetime to an epoch timestamp.
    """
    with pg_client.PostgresClient() as cur:
        # NOTE(review): the f-string prefix is redundant (no placeholders);
        # all values are bound through mogrify parameters.
        cur.execute(cur.mogrify(f"""\
            SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
            FROM events.errors AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
            WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
            ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
        errors = cur.fetchall()
    for e in errors:
        # normalize the DB datetime to an epoch timestamp for the API payload
        e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
    return helper.list_to_camel_case(errors)
def get_incidents_by_session_id(session_id, project_id):
    """Placeholder: incident events have no PostgreSQL backing store, so this
    implementation logs a warning and always returns an empty list.
    (The ClickHouse backend provides the real implementation.)
    """
    logger.warning("INCIDENTS not supported in PG")
    return []
def search(text, event_type, project_id, source, key):
    """Autocomplete dispatcher: route `text` to the backend matching `event_type`.

    Resolution order:
      1. no event_type -> generic autocomplete table;
      2. exact match in this module's supported_types();
      3. same with a "_MOBILE" suffix appended;
      4. session-meta types, including "_IOS"/"_MOBILE"-suffixed variants;
      5. otherwise an error payload.

    :return: {"data": rows} on success, {"errors": [...]} for unknown types.
    """
    if not event_type:
        return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
    if event_type in supported_types().keys():
        rows = supported_types()[event_type].get(project_id=project_id, value=text, key=key, source=source)
    elif event_type + "_MOBILE" in supported_types().keys():
        # NOTE(review): assumes event_type behaves like a plain string (str-enum)
        # so concatenation yields a valid registry key — confirm with callers.
        rows = supported_types()[event_type + "_MOBILE"].get(project_id=project_id, value=text, key=key, source=source)
    elif event_type in sessions_metas.supported_types().keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.endswith("_IOS") \
            and event_type[:-len("_IOS")] in sessions_metas.supported_types().keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.endswith("_MOBILE") \
            and event_type[:-len("_MOBILE")] in sessions_metas.supported_types().keys():
        return sessions_metas.search(text, event_type, project_id)
    else:
        return {"errors": ["unsupported event"]}
    return {"data": rows}

View file

@ -0,0 +1,11 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
# Feature flag: when EXP_EVENTS is set, issue lookups are served by the
# experimental ClickHouse implementation; otherwise PostgreSQL is used.
# Both modules are re-exported under the same name, `issues`.
if config("EXP_EVENTS", cast=bool, default=False):
    logger.info(">>> Using experimental issues")
    from . import issues_ch as issues
else:
    from . import issues_pg as issues

View file

@ -0,0 +1,56 @@
from chalicelib.utils import ch_client, helper
import datetime
from .issues_pg import get_all_types
def get(project_id, issue_id):
    """Load a single issue's event row from the ClickHouse events table.

    Takes the first matching row and enriches it with a human-readable
    `title` derived from its `type`.

    NOTE(review): when nothing matches, `data` remains the raw (empty)
    result and is passed to dict_to_camel_case as-is — confirm callers
    handle that case.

    :return: camelCase issue dict (or the empty result, see note).
    """
    with ch_client.ClickHouseClient() as cur:
        query = cur.format(query=""" \
            SELECT *
            FROM product_analytics.events
            WHERE project_id = %(project_id)s
              AND issue_id = %(issue_id)s;""",
                           parameters={"project_id": project_id, "issue_id": issue_id})
        data = cur.execute(query=query)
    if data is not None and len(data) > 0:
        data = data[0]
        data["title"] = helper.get_issue_title(data["type"])
    return helper.dict_to_camel_case(data)
def get_by_session_id(session_id, project_id, issue_type=None):
    """Fetch a session's ISSUE events from ClickHouse in chronological order.

    :param issue_type: optional filter; when None, all issue types are
        returned (the unused %(type)s parameter is still bound as None).
    :return: camelCase rows ordered by created_at.
    """
    with ch_client.ClickHouseClient() as cur:
        # the f-string only injects an optional AND clause; values are bound
        # through the parameters dict
        query = cur.format(query=f"""\
            SELECT *
            FROM product_analytics.events
            WHERE session_id = %(session_id)s
              AND project_id= %(project_id)s
              AND `$event_name`='ISSUE'
              {"AND issue_type = %(type)s" if issue_type is not None else ""}
            ORDER BY created_at;""",
                           parameters={"session_id": session_id, "project_id": project_id, "type": issue_type})
        data = cur.execute(query)
    return helper.list_to_camel_case(data)
# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    """Collapse bursts of same-type issues (in place).

    Rows are expected in ascending created_at order (see get_by_session_id).
    Whenever a later issue shares its issueType with an earlier one and
    occurred less than 2 seconds after it, the later one is dropped.

    :param issues_list: list of camelCase issue dicts, or None.
    :return: the same list object, reduced; None when the input is None.
    """
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["issueType"] == issues_list[j]["issueType"]:
                break
        else:
            # no later issue of the same type: advance to the next anchor
            # (was `break`, which wrongly stopped the whole reduction early)
            i += 1
            continue
        # compare later - earlier; the original computed earlier - later,
        # which is negative for time-ordered rows and made the check always true
        if issues_list[j]["createdAt"] - issues_list[i]["createdAt"] < datetime.timedelta(seconds=2):
            issues_list.pop(j)
        else:
            i += 1
    return issues_list

View file

@ -4,12 +4,11 @@ from chalicelib.utils import pg_client, helper
def get(project_id, issue_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""\
SELECT
*
""" \
SELECT *
FROM public.issues
WHERE project_id = %(project_id)s
AND issue_id = %(issue_id)s;""",
AND issue_id = %(issue_id)s;""",
{"project_id": project_id, "issue_id": issue_id}
)
cur.execute(query=query)
@ -35,6 +34,29 @@ def get_by_session_id(session_id, project_id, issue_type=None):
return helper.list_to_camel_case(cur.fetchall())
# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
    """Collapse bursts of same-type issues (in place).

    Rows are expected in ascending timestamp order (epoch milliseconds).
    Whenever a later issue shares its type with an earlier one and occurred
    less than 2000 ms after it, the later one is dropped.

    :param issues_list: list of issue dicts with "type"/"timestamp", or None.
    :return: the same list object, reduced; None when the input is None.
    """
    if issues_list is None:
        return None
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
        else:
            # no later issue of the same type: advance to the next anchor
            # (was `break`, which wrongly stopped the whole reduction early)
            i += 1
            continue
        # compare later - earlier; the original computed earlier - later,
        # which is negative for time-ordered rows and made the check always true
        if issues_list[j]["timestamp"] - issues_list[i]["timestamp"] < 2000:
            issues_list.pop(j)
        else:
            i += 1
    return issues_list
def get_all_types():
return [
{

View file

@ -4,7 +4,7 @@ import logging
from fastapi import HTTPException, status
import schemas
from chalicelib.core import issues
from chalicelib.core.issues import issues
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps, product_analytics, funnels
from chalicelib.core.sessions import sessions, sessions_search
@ -61,6 +61,9 @@ def get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.C
return None
data.series[0].filter.filters += data.series[0].filter.events
data.series[0].filter.events = []
print(">>>>>>>>>>>>>>>>>>>>>>>>><")
print(data.series[0].filter.model_dump())
print(">>>>>>>>>>>>>>>>>>>>>>>>><")
return heatmaps.search_short_session(project_id=project.project_id, user_id=user_id,
data=schemas.HeatMapSessionsSearch(
**data.series[0].filter.model_dump()),
@ -169,7 +172,8 @@ def get_sessions_by_card_id(project: schemas.ProjectContext, user_id, metric_id,
results = []
for s in data.series:
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id)})
**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id,
metric_of=data.metric_of)})
return results
@ -184,7 +188,8 @@ def get_sessions(project: schemas.ProjectContext, user_id, data: schemas.CardSes
s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True))
results.append({"seriesId": None, "seriesName": s.name,
**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id)})
**sessions_search.search_sessions(data=s.filter, project=project, user_id=user_id,
metric_of=data.metric_of)})
return results

View file

@ -3,7 +3,7 @@ import logging
from decouple import config
import schemas
from chalicelib.core import events
from chalicelib.core.events import events
from chalicelib.core.metrics.modules import sessions, sessions_mobs
from chalicelib.utils import sql_helper as sh

View file

@ -7,7 +7,8 @@ from typing import List
from psycopg2.extras import RealDictRow
import schemas
from chalicelib.core import events, metadata
from chalicelib.core import metadata
from chalicelib.core.events import events
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
@ -76,10 +77,10 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
values["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = ["INNER JOIN events.pages AS p USING(session_id)"]
first_stage_extra_constraints.append(
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
@ -121,31 +122,31 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
op = sh.get_sql_operator(s.operator)
# event_type = s["type"].upper()
event_type = s.type
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
if event_type == schemas.EventType.CLICK:
next_table = "events.clicks"
next_col_name = "label"
elif event_type == schemas.EventType.INPUT:
next_table = "events.inputs"
next_col_name = "label"
elif event_type == schemas.EventType.LOCATION:
next_table = "events.pages"
next_col_name = "path"
elif event_type == schemas.EventType.CUSTOM:
next_table = "events_common.customs"
next_col_name = "name"
# IOS --------------
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
next_table = events.EventType.CLICK_MOBILE.table
next_col_name = events.EventType.CLICK_MOBILE.column
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
next_table = events.EventType.INPUT_MOBILE.table
next_col_name = events.EventType.INPUT_MOBILE.column
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
next_table = events.EventType.VIEW_MOBILE.table
next_col_name = events.EventType.VIEW_MOBILE.column
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
next_table = events.EventType.CUSTOM_MOBILE.table
next_col_name = events.EventType.CUSTOM_MOBILE.column
elif event_type == schemas.EventType.CLICK_MOBILE:
next_table = "events_ios.taps"
next_col_name = "label"
elif event_type == schemas.EventType.INPUT_MOBILE:
next_table = "events_ios.inputs"
next_col_name = "label"
elif event_type == schemas.EventType.VIEW_MOBILE:
next_table = "events_ios.views"
next_col_name = "name"
elif event_type == schemas.EventType.CUSTOM_MOBILE:
next_table = "events_common.customs"
next_col_name = "name"
else:
logger.warning(f"=================UNDEFINED:{event_type}")
continue
@ -297,10 +298,10 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
values["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
filter_extra_from = ["INNER JOIN events.pages AS p USING(session_id)"]
first_stage_extra_constraints.append(
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
if meta_keys is None:
meta_keys = metadata.get(project_id=project.project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
@ -342,31 +343,31 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
op = sh.get_sql_operator(s.operator)
# event_type = s["type"].upper()
event_type = s.type
if event_type == events.EventType.CLICK.ui_type:
next_table = events.EventType.CLICK.table
next_col_name = events.EventType.CLICK.column
elif event_type == events.EventType.INPUT.ui_type:
next_table = events.EventType.INPUT.table
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_table = events.EventType.LOCATION.table
next_col_name = events.EventType.LOCATION.column
elif event_type == events.EventType.CUSTOM.ui_type:
next_table = events.EventType.CUSTOM.table
next_col_name = events.EventType.CUSTOM.column
if event_type == schemas.EventType.CLICK:
next_table = "events.clicks"
next_col_name = "label"
elif event_type == schemas.EventType.INPUT:
next_table = "events.inputs"
next_col_name = "label"
elif event_type == schemas.EventType.LOCATION:
next_table = "events.pages"
next_col_name = "path"
elif event_type == schemas.EventType.CUSTOM:
next_table = "events_common.customs"
next_col_name = "name"
# IOS --------------
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
next_table = events.EventType.CLICK_MOBILE.table
next_col_name = events.EventType.CLICK_MOBILE.column
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
next_table = events.EventType.INPUT_MOBILE.table
next_col_name = events.EventType.INPUT_MOBILE.column
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
next_table = events.EventType.VIEW_MOBILE.table
next_col_name = events.EventType.VIEW_MOBILE.column
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
next_table = events.EventType.CUSTOM_MOBILE.table
next_col_name = events.EventType.CUSTOM_MOBILE.column
elif event_type == schemas.EventType.CLICK_MOBILE:
next_table = "events_ios.taps"
next_col_name = "label"
elif event_type == schemas.EventType.INPUT_MOBILE:
next_table = "events_ios.inputs"
next_col_name = "label"
elif event_type == schemas.EventType.VIEW_MOBILE:
next_table = "events_ios.views"
next_col_name = "name"
elif event_type == schemas.EventType.CUSTOM_MOBILE:
next_table = "events_common.customs"
next_col_name = "name"
else:
logger.warning(f"=================UNDEFINED:{event_type}")
continue

View file

@ -8,7 +8,7 @@ from chalicelib.utils import ch_client
from chalicelib.utils import exp_ch_helper
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.core import events
from chalicelib.core.events import events
logger = logging.getLogger(__name__)
@ -82,7 +82,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
elif filter_type == schemas.FilterType.REFERRER:
constraints.append(
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
if meta_keys is None:
meta_keys = metadata.get(project_id=project.project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
@ -125,29 +125,29 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
e_k = f"e_value{i}"
event_type = s.type
next_event_type = exp_ch_helper.get_event_type(event_type, platform=platform)
if event_type == events.EventType.CLICK.ui_type:
if event_type == schemas.EventType.CLICK:
if platform == "web":
next_col_name = events.EventType.CLICK.column
next_col_name = "label"
if not is_any:
if schemas.ClickEventExtraOperator.has_value(s.operator):
specific_condition = sh.multi_conditions(f"selector {op} %({e_k})s", s.value, value_key=e_k)
else:
next_col_name = events.EventType.CLICK_MOBILE.column
elif event_type == events.EventType.INPUT.ui_type:
next_col_name = events.EventType.INPUT.column
elif event_type == events.EventType.LOCATION.ui_type:
next_col_name = "label"
elif event_type == schemas.EventType.INPUT:
next_col_name = "label"
elif event_type == schemas.EventType.LOCATION:
next_col_name = 'url_path'
elif event_type == events.EventType.CUSTOM.ui_type:
next_col_name = events.EventType.CUSTOM.column
elif event_type == schemas.EventType.CUSTOM:
next_col_name = "name"
# IOS --------------
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
next_col_name = events.EventType.CLICK_MOBILE.column
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
next_col_name = events.EventType.INPUT_MOBILE.column
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
next_col_name = events.EventType.VIEW_MOBILE.column
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
next_col_name = events.EventType.CUSTOM_MOBILE.column
elif event_type == schemas.EventType.CLICK_MOBILE:
next_col_name = "label"
elif event_type == schemas.EventType.INPUT_MOBILE:
next_col_name = "label"
elif event_type == schemas.EventType.VIEW_MOBILE:
next_col_name = "name"
elif event_type == schemas.EventType.CUSTOM_MOBILE:
next_col_name = "name"
else:
logger.warning(f"=================UNDEFINED:{event_type}")
continue

View file

@ -2,7 +2,8 @@ import logging
from typing import List, Union
import schemas
from chalicelib.core import events, metadata
from chalicelib.core import metadata
from chalicelib.core.events import events
from . import performance_event, sessions_legacy
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
@ -149,7 +150,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
"type": e.type,
"isEvent": True,
"value": [],
@ -174,7 +175,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
"type": e.type,
"isEvent": True,
"value": [],
@ -239,8 +240,10 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count,
{main_col} AS name,
count(DISTINCT session_id) AS total,
COALESCE(SUM(count(DISTINCT session_id)) OVER (), 0) AS total_count
FROM (SELECT s.session_id AS session_id {extra_col}
any(total_count) as total_count
FROM (SELECT s.session_id AS session_id,
count(DISTINCT s.session_id) OVER () AS total_count
{extra_col}
{query_part}) AS filtred_sessions
{extra_where}
GROUP BY {main_col}
@ -250,8 +253,10 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count,
{main_col} AS name,
count(DISTINCT user_id) AS total,
COALESCE(SUM(count(DISTINCT user_id)) OVER (), 0) AS total_count
FROM (SELECT s.user_id AS user_id {extra_col}
any(total_count) AS total_count
FROM (SELECT s.user_id AS user_id,
count(DISTINCT s.user_id) OVER () AS total_count
{extra_col}
{query_part}
WHERE isNotNull(user_id)
AND notEmpty(user_id)) AS filtred_sessions
@ -378,6 +383,34 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions_where = ["main.project_id = %(projectId)s",
"main.created_at >= toDateTime(%(startDate)s/1000)",
"main.created_at <= toDateTime(%(endDate)s/1000)"]
any_incident = False
for i, e in enumerate(data.events):
if e.type == schemas.EventType.INCIDENT and e.operator == schemas.SearchEventOperator.IS_ANY:
any_incident = True
data.events.pop(i)
# don't stop here because we could have multiple filters looking for any incident
if any_incident:
any_incident = False
for f in data.filters:
if f.type == schemas.FilterType.ISSUE:
any_incident = True
if schemas.IssueType.INCIDENT not in f.value:
f.value.append(schemas.IssueType.INCIDENT)
if f.operator == schemas.SearchEventOperator.IS_ANY:
f.operator = schemas.SearchEventOperator.IS
break
if not any_incident:
data.filters.append(schemas.SessionSearchFilterSchema(**{
"type": "issue",
"isEvent": False,
"value": [
"incident"
],
"operator": "is"
}))
if len(data.filters) > 0:
meta_keys = None
# to reduce include a sub-query of sessions inside events query, in order to reduce the selected data
@ -521,7 +554,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
sh.multi_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -668,10 +701,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
**sh.multi_values(event.source, value_key=s_k),
e_k: event.value[0] if len(event.value) > 0 else event.value}
if event_type == events.EventType.CLICK.ui_type:
if event_type == schemas.EventType.CLICK:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = events.EventType.CLICK.column
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -718,7 +751,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.CLICK_MOBILE.column
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -737,10 +770,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT.ui_type:
elif event_type == schemas.EventType.INPUT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = events.EventType.INPUT.column
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -765,7 +798,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
else:
_column = events.EventType.INPUT_MOBILE.column
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -785,7 +818,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.LOCATION.ui_type:
elif event_type == schemas.EventType.LOCATION:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = 'url_path'
@ -807,7 +840,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
)
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.VIEW_MOBILE.column
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -824,9 +857,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM.ui_type:
elif event_type == schemas.EventType.CUSTOM:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.CUSTOM.column
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -844,7 +877,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST.ui_type:
elif event_type == schemas.EventType.REQUEST:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(
@ -865,9 +898,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.STATEACTION.ui_type:
elif event_type == schemas.EventType.STATE_ACTION:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.STATEACTION.column
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -886,7 +919,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"] = event_where[-1]
# TODO: isNot for ERROR
elif event_type == events.EventType.ERROR.ui_type:
elif event_type == schemas.EventType.ERROR:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
event_where.append(
@ -911,8 +944,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# ----- Mobile
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
_column = events.EventType.CLICK_MOBILE.column
elif event_type == schemas.EventType.CLICK_MOBILE:
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -930,8 +963,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
_column = events.EventType.INPUT_MOBILE.column
elif event_type == schemas.EventType.INPUT_MOBILE:
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -949,8 +982,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
_column = events.EventType.VIEW_MOBILE.column
elif event_type == schemas.EventType.VIEW_MOBILE:
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -968,8 +1001,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
_column = events.EventType.CUSTOM_MOBILE.column
elif event_type == schemas.EventType.CUSTOM_MOBILE:
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -988,7 +1021,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
elif event_type == schemas.EventType.REQUEST_MOBILE:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(
@ -1008,8 +1041,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
_column = events.EventType.CRASH_MOBILE.column
elif event_type == schemas.EventType.ERROR_MOBILE:
_column = "name"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -1028,8 +1061,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
"main", "$properties", _column, op, event.value, e_k
))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
_column = events.EventType.SWIPE_MOBILE.column
elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -1230,7 +1263,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(json_condition(
"main", "$properties", events.EventType.GRAPHQL.column, op, f.value, e_k_f
"main", "$properties", "name", op, f.value, e_k_f
))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
@ -1253,9 +1286,65 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
elif event_type == schemas.EventType.EVENT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.CLICK.column
_column = "label"
event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
events_conditions.append({"type": event_where[-1], "condition": ""})
elif event_type == schemas.EventType.INCIDENT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = "label"
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if is_not:
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
}
)
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"main.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == schemas.EventType.CLICK_COORDINATES:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
event_where.append(
f"main.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if is_not:
event_where.append(
sh.coordinate_conditions(
condition_x=f"sub.`$properties`.normalized_x",
condition_y=f"sub.`$properties`.normalized_y",
values=event.value, value_key=e_k, is_not=True)
)
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(schemas.EventType.CLICK, platform=platform)}'"
}
)
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(
sh.coordinate_conditions(
condition_x=f"main.`$properties`.normalized_x",
condition_y=f"main.`$properties`.normalized_y",
values=event.value, value_key=e_k, is_not=True)
)
events_conditions[-1]["condition"] = event_where[-1]
else:
continue

View file

@ -2,7 +2,8 @@ import ast
import logging
import schemas
from chalicelib.core import events, metadata, projects
from chalicelib.core import metadata, projects
from chalicelib.core.events import events
from chalicelib.core.sessions import performance_event, sessions_favorite, sessions_legacy
from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
@ -410,7 +411,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -556,10 +557,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
if event_type == events.EventType.CLICK.ui_type:
if event_type == schemas.EventType.CLICK:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = events.EventType.CLICK.column
_column = "label"
event_where.append(
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -581,7 +582,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.CLICK_MOBILE.column
_column = "label"
event_where.append(
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -598,10 +599,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT.ui_type:
elif event_type == schemas.EventType.INPUT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = events.EventType.INPUT.column
_column = "label"
event_where.append(
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -622,7 +623,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
else:
_column = events.EventType.INPUT_MOBILE.column
_column = "label"
event_where.append(
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -639,7 +640,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.LOCATION.ui_type:
elif event_type == schemas.EventType.LOCATION:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
if platform == "web":
_column = 'url_path'
@ -659,7 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.VIEW_MOBILE.column
_column = "name"
event_where.append(
f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -675,9 +676,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM.ui_type:
elif event_type == schemas.EventType.CUSTOM:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.CUSTOM.column
_column = "name"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -691,7 +692,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST.ui_type:
elif event_type == schemas.EventType.REQUEST:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
@ -708,9 +709,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.STATEACTION.ui_type:
elif event_type == schemas.EventType.STATE_ACTION:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.STATEACTION.column
_column = "name"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -725,7 +726,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
# TODO: isNot for ERROR
elif event_type == events.EventType.ERROR.ui_type:
elif event_type == schemas.EventType.ERROR:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main"
events_extra_join = f"SELECT * FROM {MAIN_EVENTS_TABLE} AS main1 WHERE main1.project_id=%(project_id)s"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
@ -746,8 +747,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# ----- Mobile
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
_column = events.EventType.CLICK_MOBILE.column
elif event_type == schemas.EventType.CLICK_MOBILE:
_column = "label"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -761,8 +762,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
_column = events.EventType.INPUT_MOBILE.column
elif event_type == schemas.EventType.INPUT_MOBILE:
_column = "label"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -776,8 +777,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
_column = events.EventType.VIEW_MOBILE.column
elif event_type == schemas.EventType.VIEW_MOBILE:
_column = "name"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -791,8 +792,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
_column = events.EventType.CUSTOM_MOBILE.column
elif event_type == schemas.EventType.CUSTOM_MOBILE:
_column = "name"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -806,7 +807,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
elif event_type == schemas.EventType.REQUEST_MOBILE:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
@ -822,8 +823,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
_column = events.EventType.CRASH_MOBILE.column
elif event_type == schemas.EventType.ERROR_MOBILE:
_column = "name"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -837,8 +838,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
_column = events.EventType.SWIPE_MOBILE.column
elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
_column = "label"
event_where.append(f"main.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
@ -992,7 +993,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
_multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
_multiple_conditions(f"main.name {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
@ -1221,7 +1222,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**_multiple_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
if c.type == schemas.EventType.LOCATION:
_extra_or_condition.append(
_multiple_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
@ -1358,18 +1359,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
def get_session_user(project_id, user_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""\
SELECT
user_id,
count(*) as session_count,
max(start_ts) as last_seen,
min(start_ts) as first_seen
FROM
"public".sessions
WHERE
project_id = %(project_id)s
AND user_id = %(userId)s
AND duration is not null
""" \
SELECT user_id,
count(*) as session_count,
max(start_ts) as last_seen,
min(start_ts) as first_seen
FROM "public".sessions
WHERE project_id = %(project_id)s
AND user_id = %(userId)s
AND duration is not null
GROUP BY user_id;
""",
{"project_id": project_id, "userId": user_id}

View file

@ -2,7 +2,8 @@ import logging
from typing import List, Union
import schemas
from chalicelib.core import events, metadata
from chalicelib.core.events import events
from chalicelib.core import metadata
from . import performance_event
from chalicelib.utils import pg_client, helper, metrics_helper
from chalicelib.utils import sql_helper as sh
@ -439,7 +440,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
extra_constraints.append(
sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
elif filter_type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -580,36 +581,36 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.EventType.CLICK.ui_type:
if event_type == schemas.EventType.CLICK:
if platform == "web":
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
event_from = event_from % f"events.clicks AS main "
if not is_any:
if schemas.ClickEventExtraOperator.has_value(event.operator):
event_where.append(
sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
else:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
value_key=e_k))
else:
event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
event_from = event_from % f"events_ios.taps AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.label {op} %({e_k})s",
event.value,
value_key=e_k))
elif event_type == events.EventType.TAG.ui_type:
event_from = event_from % f"{events.EventType.TAG.table} AS main "
elif event_type == schemas.EventType.TAG:
event_from = event_from % f"events.tags AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.tag_id = %({e_k})s", event.value, value_key=e_k))
elif event_type == events.EventType.INPUT.ui_type:
elif event_type == schemas.EventType.INPUT:
if platform == "web":
event_from = event_from % f"{events.EventType.INPUT.table} AS main "
event_from = event_from % f"events.inputs AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.label {op} %({e_k})s", event.value,
value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
@ -617,53 +618,53 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
else:
event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
event_from = event_from % f"events_ios.inputs AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.label {op} %({e_k})s",
event.value,
value_key=e_k))
elif event_type == events.EventType.LOCATION.ui_type:
elif event_type == schemas.EventType.LOCATION:
if platform == "web":
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
event_from = event_from % f"events.pages AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
sh.multi_conditions(f"main.path {op} %({e_k})s",
event.value, value_key=e_k))
else:
event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
event_from = event_from % f"events_ios.views AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.name {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
elif event_type == schemas.EventType.CUSTOM:
event_from = event_from % f"events_common.customs AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.name {op} %({e_k})s", event.value,
value_key=e_k))
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
elif event_type == schemas.EventType.REQUEST:
event_from = event_from % f"events_common.requests AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value,
sh.multi_conditions(f"main.path {op} %({e_k})s", event.value,
value_key=e_k))
# elif event_type == events.event_type.GRAPHQL.ui_type:
# elif event_type == schemas.event_type.GRAPHQL:
# event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
# if not is_any:
# event_where.append(
# _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value,
# value_key=e_k))
elif event_type == events.EventType.STATEACTION.ui_type:
event_from = event_from % f"{events.EventType.STATEACTION.table} AS main "
elif event_type == schemas.EventType.STATE_ACTION:
event_from = event_from % f"events.state_actions AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s",
sh.multi_conditions(f"main.name {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.ERROR.ui_type:
event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
elif event_type == schemas.EventType.ERROR:
event_from = event_from % f"events.errors AS main INNER JOIN public.errors AS main1 USING(error_id)"
event.source = list(set(event.source))
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
@ -674,59 +675,59 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
# ----- Mobile
elif event_type == events.EventType.CLICK_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.CLICK_MOBILE.table} AS main "
elif event_type == schemas.EventType.CLICK_MOBILE:
event_from = event_from % f"events_ios.taps AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.label {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.INPUT_MOBILE.table} AS main "
elif event_type == schemas.EventType.INPUT_MOBILE:
event_from = event_from % f"events_ios.inputs AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.INPUT_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.label {op} %({e_k})s",
event.value, value_key=e_k))
if event.source is not None and len(event.source) > 0:
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")}
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.VIEW_MOBILE.table} AS main "
elif event_type == schemas.EventType.VIEW_MOBILE:
event_from = event_from % f"events_ios.views AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.VIEW_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.name {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.CUSTOM_MOBILE.table} AS main "
elif event_type == schemas.EventType.CUSTOM_MOBILE:
event_from = event_from % f"events_common.customs AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CUSTOM_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.name {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.REQUEST_MOBILE.table} AS main "
elif event_type == schemas.EventType.REQUEST_MOBILE:
event_from = event_from % f"events_common.requests AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.REQUEST_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.path {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
event_from = event_from % f"{events.EventType.CRASH_MOBILE.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
elif event_type == schemas.EventType.ERROR_MOBILE:
event_from = event_from % f"events_common.crashes AS main INNER JOIN public.crashes_ios AS main1 USING(crash_ios_id)"
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
event_from = event_from % f"{events.EventType.SWIPE_MOBILE.table} AS main "
elif event_type == schemas.EventType.SWIPE_MOBILE and platform != "web":
event_from = event_from % f"events_ios.swipes AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.SWIPE_MOBILE.column} {op} %({e_k})s",
sh.multi_conditions(f"main.label {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
event_from = event_from % f"events_common.requests AS main "
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s",
sh.multi_conditions(f"main.path {op} %({e_k})s",
event.value, value_key=e_k))
col = performance_event.get_col(event_type)
colname = col["column"]
@ -751,7 +752,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
]:
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
event_from = event_from % f"events.pages AS main "
col = performance_event.get_col(event_type)
colname = col["column"]
tname = "main"
@ -762,7 +763,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
f"{tname}.timestamp <= %(endDate)s"]
if not is_any:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
sh.multi_conditions(f"main.path {op} %({e_k})s",
event.value, value_key=e_k))
e_k += "_custom"
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
@ -772,7 +773,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
event.source, value_key=e_k))
elif event_type == schemas.EventType.REQUEST_DETAILS:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
event_from = event_from % f"events_common.requests AS main "
apply = False
for j, f in enumerate(event.filters):
is_any = sh.isAny_opreator(f.operator)
@ -784,7 +785,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
sh.multi_conditions(f"main.path {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
@ -816,7 +817,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
if not apply:
continue
elif event_type == schemas.EventType.GRAPHQL:
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
event_from = event_from % f"events.graphql AS main "
for j, f in enumerate(event.filters):
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
@ -827,7 +828,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
sh.multi_conditions(f"main.name {op} %({e_k_f})s", f.value,
value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
event_where.append(
@ -908,7 +909,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
# b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")
if errors_only:
extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_from += f" INNER JOIN events.errors AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
extra_constraints.append("ser.source = 'js_exception'")
extra_constraints.append("ser.project_id = %(project_id)s")
# if error_status != schemas.ErrorStatus.all:
@ -984,9 +985,9 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
if c.type == schemas.EventType.LOCATION:
_extra_or_condition.append(
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
sh.multi_conditions(f"ev.path {op} %({e_k})s",
c.value, value_key=e_k))
else:
logger.warning(f"unsupported extra_event type:${c.type}")
@ -1044,18 +1045,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
def get_session_user(project_id, user_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""\
SELECT
user_id,
count(*) as session_count,
max(start_ts) as last_seen,
min(start_ts) as first_seen
FROM
"public".sessions
WHERE
project_id = %(project_id)s
AND user_id = %(userId)s
AND duration is not null
""" \
SELECT user_id,
count(*) as session_count,
max(start_ts) as last_seen,
min(start_ts) as first_seen
FROM "public".sessions
WHERE project_id = %(project_id)s
AND user_id = %(userId)s
AND duration is not null
GROUP BY user_id;
""",
{"project_id": project_id, "userId": user_id}
@ -1074,11 +1072,10 @@ def count_all():
def session_exists(project_id, session_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT 1
FROM public.sessions
WHERE session_id=%(session_id)s
AND project_id=%(project_id)s
LIMIT 1;""",
query = cur.mogrify("""SELECT 1
FROM public.sessions
WHERE session_id = %(session_id)s
AND project_id = %(project_id)s LIMIT 1;""",
{"project_id": project_id, "session_id": session_id})
cur.execute(query)
row = cur.fetchone()

View file

@ -1,6 +1,7 @@
import schemas
from chalicelib.core import events, metadata, events_mobile, \
issues, assist, canvas, user_testing
from chalicelib.core import metadata, assist, canvas, user_testing
from chalicelib.core.issues import issues
from chalicelib.core.events import events, events_mobile
from . import sessions_mobs, sessions_devtool
from chalicelib.core.errors.modules import errors_helper
from chalicelib.utils import pg_client, helper
@ -128,30 +129,8 @@ def get_events(project_id, session_id):
data['userTesting'] = user_testing.get_test_signals(session_id=session_id, project_id=project_id)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
data['issues'] = reduce_issues(data['issues'])
data['issues'] = issues.reduce_issues(data['issues'])
data['incidents'] = events.get_incidents_by_session_id(session_id=session_id, project_id=project_id)
return data
else:
return None
# To reduce the number of issues in the replay;
# will be removed once we agree on how to show issues
def reduce_issues(issues_list):
if issues_list is None:
return None
i = 0
# remove same-type issues if the time between them is <2s
while i < len(issues_list) - 1:
for j in range(i + 1, len(issues_list)):
if issues_list[i]["type"] == issues_list[j]["type"]:
break
else:
i += 1
break
if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:
issues_list.pop(j)
else:
i += 1
return issues_list

View file

@ -64,8 +64,7 @@ def __parse_metadata(metadata_map):
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
count_only=False, issue=None, ids_only=False):
platform = project.platform
count_only=False, issue=None, ids_only=False, metric_of: schemas.MetricOfTable = None):
if data.bookmarked:
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)
if data.startTimestamp is None:
@ -75,18 +74,78 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
'sessions': [],
'_src': 2
}
# ---------------------- extra filter in order to only select sessions that has been used in the card-table
extra_event = None
# extra_deduplication = []
extra_conditions = None
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = f"""SELECT DISTINCT ev.session_id,
JSONExtractString(toString(ev.`$properties`), 'url_path') AS url_path
FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
WHERE ev.created_at >= toDateTime(%(startDate)s / 1000)
AND ev.created_at <= toDateTime(%(endDate)s / 1000)
AND ev.project_id = %(project_id)s
AND ev.`$event_name` = 'LOCATION'"""
# extra_deduplication.append("url_path")
extra_conditions = {}
for e in data.events:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
"type": e.type,
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
extra_conditions[e.operator].value.append(v)
extra_conditions = list(extra_conditions.values())
elif metric_of == schemas.MetricOfTable.FETCH:
extra_event = f"""SELECT DISTINCT ev.session_id
FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
WHERE ev.created_at >= toDateTime(%(startDate)s / 1000)
AND ev.created_at <= toDateTime(%(endDate)s / 1000)
AND ev.project_id = %(project_id)s
AND ev.`$event_name` = 'REQUEST'"""
# extra_deduplication.append("url_path")
extra_conditions = {}
for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema(**{
"type": e.type,
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
extra_conditions[e.operator].value.append(v)
extra_conditions = list(extra_conditions.values())
# elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
# data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
# operator=schemas.SearchEventOperator.IS))
# ----------------------
if project.platform == "web":
full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=error_status,
errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue,
project_id=project.project_id,
user_id=user_id, platform=platform)
user_id=user_id, platform=project.platform,
extra_event=extra_event,
# extra_deduplication=extra_deduplication,
extra_conditions=extra_conditions)
else:
full_args, query_part = sessions_legacy_mobil.search_query_parts_ch(data=data, error_status=error_status,
errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue,
project_id=project.project_id,
user_id=user_id, platform=platform)
user_id=user_id, platform=project.platform)
if data.sort == "startTs":
data.sort = "datetime"
if data.limit is not None and data.page is not None:

View file

@ -40,7 +40,7 @@ COALESCE((SELECT TRUE
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.ProjectContext,
user_id, errors_only=False, error_status=schemas.ErrorStatus.ALL,
count_only=False, issue=None, ids_only=False):
count_only=False, issue=None, ids_only=False, metric_of: schemas.MetricOfTable = None):
platform = project.platform
if data.bookmarked:
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project.project_id, user_id)

View file

@ -1 +1,2 @@
from .sessions_viewed import *
from .sessions_viewed import *
from .sessions_viewed_ch import *

View file

@ -87,7 +87,7 @@ async def create_tenant(data: schemas.UserSignupSchema):
"spotRefreshToken": r.pop("spotRefreshToken"),
"spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"),
'data': {
"scopeState": 0,
"scopeState": 2,
"user": r
}
}

View file

@ -56,7 +56,8 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
schemas.EventType.ERROR: "ERROR",
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE',
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE',
schemas.FetchFilterType.FETCH_URL: 'REQUEST'
schemas.FetchFilterType.FETCH_URL: 'REQUEST',
schemas.EventType.INCIDENT: "INCIDENT",
}
defs_mobile = {
schemas.EventType.CLICK_MOBILE: "TAP",
@ -65,7 +66,8 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
schemas.EventType.REQUEST_MOBILE: "REQUEST",
schemas.EventType.ERROR_MOBILE: "CRASH",
schemas.EventType.VIEW_MOBILE: "VIEW",
schemas.EventType.SWIPE_MOBILE: "SWIPE"
schemas.EventType.SWIPE_MOBILE: "SWIPE",
schemas.EventType.INCIDENT: "INCIDENT"
}
if platform != "web" and event_type in defs_mobile:
return defs_mobile.get(event_type)

View file

@ -1 +0,0 @@
from .or_cache import CachedResponse

View file

@ -1,83 +0,0 @@
import functools
import inspect
import json
import logging
from chalicelib.utils import pg_client
import time
from fastapi.encoders import jsonable_encoder
logger = logging.getLogger(__name__)
class CachedResponse:
def __init__(self, table, ttl):
self.table = table
self.ttl = ttl
def __call__(self, func):
self.param_names = {i: param for i, param in enumerate(inspect.signature(func).parameters)}
@functools.wraps(func)
def wrapper(*args, **kwargs):
values = dict()
for i, param in self.param_names.items():
if i < len(args):
values[param] = args[i]
elif param in kwargs:
values[param] = kwargs[param]
else:
values[param] = None
result = self.__get(values)
if result is None or result["expired"] \
or result["result"] is None or len(result["result"]) == 0:
now = time.time()
result = func(*args, **kwargs)
now = time.time() - now
if result is not None and len(result) > 0:
self.__add(values, result, now)
result[0]["cached"] = False
else:
logger.info(f"using cached response for "
f"{func.__name__}({','.join([f'{key}={val}' for key, val in enumerate(values)])})")
result = result["result"]
result[0]["cached"] = True
return result
return wrapper
def __get(self, values):
with pg_client.PostgresClient() as cur:
sub_constraints = []
for key, value in values.items():
if value is not None:
sub_constraints.append(f"{key}=%({key})s")
else:
sub_constraints.append(f"{key} IS NULL")
query = f"""SELECT result,
(%(ttl)s>0
AND EXTRACT(EPOCH FROM (timezone('utc'::text, now()) - created_at - INTERVAL %(interval)s)) > 0) AS expired
FROM {self.table}
WHERE {" AND ".join(sub_constraints)}"""
query = cur.mogrify(query, {**values, 'ttl': self.ttl, 'interval': f'{self.ttl} seconds'})
logger.debug("------")
logger.debug(query)
logger.debug("------")
cur.execute(query)
result = cur.fetchone()
return result
def __add(self, values, result, execution_time):
with pg_client.PostgresClient() as cur:
query = f"""INSERT INTO {self.table} ({",".join(values.keys())},result,execution_time)
VALUES ({",".join([f"%({param})s" for param in values.keys()])},%(result)s,%(execution_time)s)
ON CONFLICT ({",".join(values.keys())}) DO UPDATE SET result=%(result)s,
execution_time=%(execution_time)s,
created_at=timezone('utc'::text, now());"""
query = cur.mogrify(query, {**values,
"result": json.dumps(jsonable_encoder(result)),
"execution_time": execution_time})
logger.debug("------")
logger.debug(query)
logger.debug("------")
cur.execute(query)

View file

@ -44,7 +44,7 @@ def reverse_sql_operator(op):
return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"
def multi_conditions(condition, values, value_key="value", is_not=False):
def multi_conditions(condition, values, value_key="value", is_not=False) -> str:
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
@ -79,3 +79,30 @@ def single_value(values):
if isinstance(v, Enum):
values[i] = v.value
return values
def coordinate_conditions(condition_x, condition_y, values, value_key="value", is_not=False):
query = []
if len(values) == 2:
# if 2 values are provided, it means x=v[0] and y=v[1]
for i in range(len(values)):
k = f"{value_key}_{i}"
if i == 0:
query.append(f"{condition_x}=%({k})s")
elif i == 1:
query.append(f"{condition_y}=%({k})s")
elif len(values) == 4:
# if 4 values are provided, it means v[0]<=x<=v[1] and v[2]<=y<=v[3]
for i in range(len(values)):
k = f"{value_key}_{i}"
if i == 0:
query.append(f"{condition_x}>=%({k})s")
elif i == 1:
query.append(f"{condition_x}<=%({k})s")
elif i == 2:
query.append(f"{condition_y}>=%({k})s")
elif i == 3:
query.append(f"{condition_y}<=%({k})s")
return "(" + (" AND " if is_not else " OR ").join(query) + ")"

View file

@ -75,4 +75,5 @@ EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_SESSIONS_SEARCH=true
EXP_SESSIONS_SEARCH=true
EXP_EVENTS=true

View file

@ -68,4 +68,5 @@ EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_METRICS=true
EXP_EVENTS=true

View file

@ -1,9 +1,9 @@
urllib3==2.4.0
requests==2.32.3
boto3==1.38.10
boto3==1.38.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.7
psycopg[pool,binary]==3.2.9
clickhouse-connect==0.8.17
elasticsearch==9.0.1
jira==3.8.0

View file

@ -1,9 +1,9 @@
urllib3==2.4.0
requests==2.32.3
boto3==1.38.10
boto3==1.38.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.7
psycopg[pool,binary]==3.2.9
clickhouse-connect==0.8.17
elasticsearch==9.0.1
jira==3.8.0
@ -15,4 +15,4 @@ python-decouple==3.8
pydantic[email]==2.11.4
apscheduler==3.11.0
redis==6.0.0
redis==6.1.0

View file

@ -4,8 +4,9 @@ from decouple import config
from fastapi import Depends, Body, BackgroundTasks
import schemas
from chalicelib.core import events, projects, issues, metadata, reset_password, log_tools, \
from chalicelib.core import events, projects, metadata, reset_password, log_tools, \
announcements, weekly_report, assist, mobile, tenants, boarding, notifications, webhook, users, saved_search, tags
from chalicelib.core.issues import issues
from chalicelib.core.sourcemaps import sourcemaps
from chalicelib.core.metrics import custom_metrics
from chalicelib.core.alerts import alerts

View file

@ -8,6 +8,7 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core import notes
from chalicelib.core import scope
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
@ -473,8 +474,8 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
@app.get('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def get_note_by_id(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
user_id=context.user_id)
data = notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
user_id=context.user_id)
if "errors" in data:
return data
return {
@ -487,8 +488,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]}
data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id, data=data)
data = notes.create(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id, data=data)
if "errors" in data.keys():
return data
return {
@ -498,8 +499,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
data = notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
if "errors" in data:
return data
return {
@ -510,8 +511,8 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
data = notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
if "errors" in data.keys():
return data
return {
@ -521,29 +522,29 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(project_id=projectId, note_id=noteId)
data = notes.delete(project_id=projectId, note_id=noteId)
return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
return notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
return notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
data = notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
if "errors" in data:
return data
return {'data': data}

View file

@ -21,7 +21,9 @@ def schema_extra(schema: dict, _):
class BaseModel(_BaseModel):
model_config = ConfigDict(alias_generator=attribute_to_camel_case,
use_enum_values=True,
json_schema_extra=schema_extra)
json_schema_extra=schema_extra,
# extra='forbid'
)
class Enum(_Enum):

View file

@ -406,6 +406,8 @@ class EventType(str, Enum):
ERROR_MOBILE = "errorMobile"
SWIPE_MOBILE = "swipeMobile"
EVENT = "event"
INCIDENT = "incident"
CLICK_COORDINATES = "clickCoordinates"
class PerformanceEventType(str, Enum):
@ -506,8 +508,8 @@ class IssueType(str, Enum):
CUSTOM = 'custom'
JS_EXCEPTION = 'js_exception'
MOUSE_THRASHING = 'mouse_thrashing'
# IOS
TAP_RAGE = 'tap_rage'
TAP_RAGE = 'tap_rage' # IOS
INCIDENT = 'incident'
class MetricFormatType(str, Enum):
@ -659,6 +661,10 @@ class SessionSearchEventSchema(BaseModel):
elif self.type == EventType.GRAPHQL:
assert isinstance(self.filters, List) and len(self.filters) > 0, \
f"filters should be defined for {EventType.GRAPHQL}"
elif self.type == EventType.CLICK_COORDINATES:
assert isinstance(self.value, List) \
and (len(self.value) == 0 or len(self.value) == 2 or len(self.value) == 4), \
f"value should be [x,y] or [x1,x2,y1,y2] for {EventType.CLICK_COORDINATES}"
if isinstance(self.operator, ClickEventExtraOperator):
assert self.type == EventType.CLICK, \
@ -1037,11 +1043,16 @@ class MetricOfPathAnalysis(str, Enum):
session_count = MetricOfTimeseries.SESSION_COUNT.value
# class CardSessionsSchema(SessionsSearchPayloadSchema):
class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
startTimestamp: int = Field(default=TimeUTC.now(-7))
endTimestamp: int = Field(default=TimeUTC.now())
density: int = Field(default=7, ge=1, le=200)
# we need metric_type&metric_of in the payload of sessions search
# because the API will retrun all sessions if the card is not identified
# example: table of requests contains only sessions that have a request,
# but drill-down doesn't take that into consideration
metric_type: MetricType = Field(...)
metric_of: Any
series: List[CardSeriesSchema] = Field(default_factory=list)
# events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
@ -1119,8 +1130,6 @@ class __CardSchema(CardSessionsSchema):
thumbnail: Optional[str] = Field(default=None)
metric_format: Optional[MetricFormatType] = Field(default=None)
view_type: Any
metric_type: MetricType = Field(...)
metric_of: Any
metric_value: List[IssueType] = Field(default_factory=list)
# This is used to save the selected session for heatmaps
session_id: Optional[int] = Field(default=None)
@ -1520,7 +1529,7 @@ class MetricSearchSchema(_PaginatedSchema):
class _HeatMapSearchEventRaw(SessionSearchEventSchema):
type: Literal[EventType.LOCATION] = Field(...)
type: Literal[EventType.LOCATION, EventType.CLICK_COORDINATES] = Field(...)
class HeatMapSessionsSearch(SessionsSearchPayloadSchema):

View file

@ -5,7 +5,7 @@ from pydantic import ValidationInfo
from .overrides import Enum
NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#]*$"
NAME_PATTERN = r"^[a-z,A-Z,0-9,\-,é,è,à,ç, ,|,&,\/,\\,_,.,#,']*$"
def transform_email(email: str) -> str:

View file

@ -115,7 +115,7 @@ var batches = map[string]string{
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties", properties) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$os", "$browser", "$referrer", "$country", "$state", "$city", "$current_url", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
@ -726,7 +726,6 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N
func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
jsonString, err := json.Marshal(map[string]interface{}{
"payload": msg.Payload,
"user_device": session.UserDevice,
"user_device_type": session.UserDeviceType,
"page_title ": msg.PageTitle,
@ -734,6 +733,14 @@ func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.Cu
if err != nil {
return fmt.Errorf("can't marshal custom event: %s", err)
}
var customPayload interface{}
if err := json.Unmarshal([]byte(msg.Payload), customPayload); err != nil {
log.Printf("can't unmarshal custom event payload into object: %s", err)
customPayload = map[string]interface{}{
"payload": msg.Payload,
}
}
eventTime := datetime(msg.Timestamp)
if err := c.batches["custom"].Append(
session.SessionID,
@ -753,7 +760,8 @@ func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.Cu
session.UserState,
session.UserCity,
cropString(msg.Url),
jsonString,
jsonString, // $properties
customPayload, // properties
); err != nil {
c.checkError("custom", err)
return fmt.Errorf("can't append to custom batch: %s", err)

5
ee/api/.gitignore vendored
View file

@ -201,8 +201,7 @@ Pipfile.lock
/chalicelib/core/metrics/heatmaps
/chalicelib/core/metrics/product_analytics
/chalicelib/core/metrics/product_anaytics2.py
/chalicelib/core/events.py
/chalicelib/core/events_mobile.py
/chalicelib/core/events
/chalicelib/core/feature_flags.py
/chalicelib/core/issue_tracking/*
/chalicelib/core/issues.py
@ -212,7 +211,7 @@ Pipfile.lock
/chalicelib/core/mobile.py
/chalicelib/core/saved_search.py
/chalicelib/core/sessions/*.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/sessions_viewed
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps

View file

@ -4,28 +4,28 @@ verify_ssl = true
name = "pypi"
[packages]
urllib3 = "==2.3.0"
urllib3 = "==2.4.0"
requests = "==2.32.3"
boto3 = "==1.37.21"
boto3 = "==1.38.16"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.2"
psycopg = {extras = ["pool", "binary"], version = "==3.2.7"}
clickhouse-connect = "==0.8.17"
elasticsearch = "==9.0.1"
jira = "==3.8.0"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
uvicorn = {extras = ["standard"], version = "==0.34.2"}
gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
pydantic = {extras = ["email"], version = "==2.11.4"}
apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20"
redis = "==5.2.1"
azure-storage-blob = "==12.25.0"
redis = "==6.1.0"
azure-storage-blob = "==12.25.1"
[dev-packages]
[requires]
python_version = "3.12"
python_full_version = "3.12.8"

View file

@ -1,2 +0,0 @@
from .sessions_viewed import *
from .sessions_viewed_ee import *

View file

@ -98,7 +98,7 @@ async def create_tenant(data: schemas.UserSignupSchema):
"spotRefreshTokenMaxAge": r.pop("spotRefreshTokenMaxAge"),
"tenantId": t["tenant_id"],
'data': {
"scopeState": 0,
"scopeState": 2,
"user": r
}
}

View file

@ -21,8 +21,7 @@ rm -rf ./chalicelib/core/metrics/dashboards.py
rm -rf ./chalicelib/core/metrics/heatmaps
rm -rf ./chalicelib/core/metrics/product_analytics
rm -rf ./chalicelib/core/metrics/product_anaytics2.py
rm -rf ./chalicelib/core/events.py
rm -rf ./chalicelib/core/events_mobile.py
rm -rf ./chalicelib/core/events
rm -rf ./chalicelib/core/feature_flags.py
rm -rf ./chalicelib/core/issue_tracking
rm -rf ./chalicelib/core/integrations_manager.py
@ -33,7 +32,7 @@ rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/saved_search.py
rm -rf ./chalicelib/core/sessions/*.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/sessions_viewed
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps

View file

@ -1,6 +1,6 @@
urllib3==2.4.0
requests==2.32.3
boto3==1.38.10
boto3==1.38.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.7

View file

@ -1,6 +1,6 @@
urllib3==2.3.0
urllib3==2.4.0
requests==2.32.3
boto3==1.37.21
boto3==1.38.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.7
@ -14,5 +14,5 @@ python-decouple==3.8
pydantic[email]==2.11.4
apscheduler==3.11.0
redis==6.0.0
redis==6.1.0
azure-storage-blob==12.25.1

View file

@ -1,6 +1,6 @@
urllib3==2.4.0
requests==2.32.3
boto3==1.38.10
boto3==1.38.16
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.7
@ -18,9 +18,11 @@ apscheduler==3.11.0
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
python3-saml==1.16.0 --no-binary=lxml
python3-saml==1.16.0
--no-binary=lxml
python-multipart==0.0.20
redis==6.0.0
redis==6.1.0
#confluent-kafka==2.1.0
azure-storage-blob==12.25.1

View file

@ -8,6 +8,7 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core import notes
from chalicelib.core import scope
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
@ -274,8 +275,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
Depends(contextual_validators.validate_contextual_payload),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id,
platform=context.project.platform)
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id)
return {'data': data}
@ -284,8 +284,7 @@ def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema =
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = \
Depends(contextual_validators.validate_contextual_payload),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True,
platform=context.project.platform)
data = sessions_search.search_sessions(data=data, project=context.project, user_id=context.user_id, ids_only=True)
return {'data': data}
@ -510,8 +509,8 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
@app.get('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_note_by_id(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
user_id=context.user_id)
data = notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
user_id=context.user_id)
if "errors" in data:
return data
return {
@ -525,8 +524,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]}
data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id, data=data)
data = notes.create(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id, data=data)
if "errors" in data.keys():
return data
return {
@ -537,8 +536,8 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.READ_NOTES)])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
data = notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
if "errors" in data:
return data
return {
@ -550,8 +549,8 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
data = notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
if "errors" in data.keys():
return data
return {
@ -562,7 +561,7 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(project_id=projectId, note_id=noteId)
data = notes.delete(project_id=projectId, note_id=noteId)
return data
@ -570,22 +569,22 @@ def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.Curr
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
return notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
return notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
data = notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
if "errors" in data:
return data
return {'data': data}

View file

@ -1,3 +1,16 @@
SELECT 1
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
SELECT 1
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
ALTER TABLE experimental.sessions
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
SET allow_experimental_json_type = 1;

View file

@ -1,3 +1,16 @@
SELECT 1
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
SELECT 1
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
ALTER TABLE experimental.sessions
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
DROP TABLE IF EXISTS product_analytics.all_events;

View file

@ -106,7 +106,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String),
user_state LowCardinality(String),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
datetime DateTime,
timezone LowCardinality(Nullable(String)),
duration UInt32,
@ -134,7 +134,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
_timestamp DateTime DEFAULT now()
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
@ -791,8 +791,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
-- Incremental materialized view to get random examples of property values using $properties & properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mvREFRESHEVERY30HOURTOproduct_analytics.property_values_samples AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
@ -843,8 +842,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_events_grouped
ORDER BY (project_id, value)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mv
REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_events_grouped AS
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_events_grouped_mvREFRESHEVERY30MINUTETOproduct_analytics.autocomplete_events_grouped AS
SELECT project_id,
value,
count(1) AS data_count,
@ -873,7 +871,8 @@ SELECT project_id,
_timestamp
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE length(value) > 0 AND isNull(toFloat64OrNull(value))
WHERE length(value) > 0
AND isNull(toFloat64OrNull(value))
AND _timestamp > now() - INTERVAL 1 MONTH;
@ -889,8 +888,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.autocomplete_event_properties_group
ORDER BY (project_id, event_name, property_name, value)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mv
REFRESH EVERY 30 MINUTE TO product_analytics.autocomplete_event_properties_grouped AS
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.autocomplete_event_properties_grouped_mvREFRESHEVERY30MINUTETOproduct_analytics.autocomplete_event_properties_grouped AS
SELECT project_id,
event_name,
property_name,

View file

@ -19,7 +19,9 @@ $fn_def$, :'next_version')
--
DROP SCHEMA IF EXISTS or_cache CASCADE;
ALTER TABLE public.tenants
ALTER COLUMN scope_state SET DEFAULT 2;
COMMIT;

View file

@ -17,7 +17,6 @@ BEGIN;
CREATE SCHEMA IF NOT EXISTS events_common;
CREATE SCHEMA IF NOT EXISTS events;
CREATE SCHEMA IF NOT EXISTS events_ios;
CREATE SCHEMA IF NOT EXISTS or_cache;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS pgcrypto;
@ -105,7 +104,7 @@ CREATE TABLE public.tenants
t_users integer NOT NULL DEFAULT 1,
t_integrations integer NOT NULL DEFAULT 0,
last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
scope_state smallint NOT NULL DEFAULT 0
scope_state smallint NOT NULL DEFAULT 2
);
@ -1264,17 +1263,6 @@ CREATE TABLE public.projects_conditions
filters jsonb NOT NULL DEFAULT '[]'::jsonb
);
CREATE TABLE or_cache.autocomplete_top_values
(
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
event_type text NOT NULL,
event_key text NULL,
result jsonb NULL,
execution_time integer NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
UNIQUE NULLS NOT DISTINCT (project_id, event_type, event_key)
);
CREATE SCHEMA IF NOT EXISTS spots;
CREATE TABLE IF NOT EXISTS spots.spots

View file

@ -17,6 +17,18 @@ $$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
CREATE SCHEMA IF NOT EXISTS or_cache;
CREATE TABLE IF NOT EXISTS or_cache.autocomplete_top_values
(
project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
event_type text NOT NULL,
event_key text NULL,
result jsonb NULL,
execution_time integer NULL,
created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
UNIQUE NULLS NOT DISTINCT (project_id, event_type, event_key)
);
COMMIT;

View file

@ -32,7 +32,6 @@ const components: any = {
),
SpotsListPure: lazy(() => import('Components/Spots/SpotsList')),
SpotPure: lazy(() => import('Components/Spots/SpotPlayer')),
ScopeSetup: lazy(() => import('Components/ScopeForm')),
HighlightsPure: lazy(() => import('Components/Highlights/HighlightsList')),
KaiPure: lazy(() => import('Components/Kai/KaiChat')),
};
@ -111,7 +110,6 @@ function PrivateRoutes() {
const sites = projectsStore.list;
const { siteId } = projectsStore;
const hasRecordings = sites.some((s) => s.recorded);
const redirectToSetup = scope === 0;
const redirectToOnboarding =
!onboarding &&
(localStorage.getItem(GLOBAL_HAS_NO_RECORDINGS) === 'true' ||
@ -138,13 +136,6 @@ function PrivateRoutes() {
return (
<Suspense fallback={<Loader loading className="flex-1" />}>
<Switch key="content">
<Route
exact
strict
path={SCOPE_SETUP}
component={enhancedComponents.ScopeSetup}
/>
{redirectToSetup ? <Redirect to={SCOPE_SETUP} /> : null}
<Route path={CLIENT_PATH} component={enhancedComponents.Client} />
<Route
path={withSiteId(ONBOARDING_PATH, siteIdList)}

View file

@ -195,7 +195,7 @@ const EChartsSankey: React.FC<Props> = (props) => {
header: {
fontWeight: '600',
fontSize: 12,
color: '#333',
color: 'var(--color-gray-darkest)',
overflow: 'truncate',
paddingBottom: '.5rem',
paddingLeft: '14px',
@ -203,16 +203,16 @@ const EChartsSankey: React.FC<Props> = (props) => {
},
body: {
fontSize: 12,
color: '#000',
color: 'var(--color-black)',
},
percentage: {
fontSize: 12,
color: '#454545',
color: 'var(--color-gray-dark)',
},
sessions: {
fontSize: 12,
fontFamily: "mono, 'monospace', sans-serif",
color: '#999999',
color: 'var(--color-gray-dark)',
},
clickIcon: {
backgroundColor: {
@ -266,6 +266,7 @@ const EChartsSankey: React.FC<Props> = (props) => {
},
tooltip: {
formatter: sankeyTooltip(echartNodes, nodeValues),
backgroundColor: 'var(--color-white)',
},
nodeAlign: 'left',
nodeWidth: 40,

View file

@ -7,6 +7,7 @@ import { useModal } from 'Components/ModalContext';
import Widget from '@/mstore/types/widget';
import { useTranslation } from 'react-i18next';
import { FilterKey } from 'Types/filter/filterType';
import { observer } from 'mobx-react-lite';
interface Props {
metric?: any;
@ -128,4 +129,4 @@ function SessionsBy(props: Props) {
);
}
export default SessionsBy;
export default observer(SessionsBy);

View file

@ -3,7 +3,7 @@ import { Tag } from 'antd';
function MethodType({ data }) {
return (
<Tag bordered={false} className="rounded-lg bg-indigo-50">
<Tag bordered={false} className="rounded-lg bg-indigo-lightest">
{data.method}
</Tag>
);

View file

@ -47,7 +47,7 @@ function AddCardSelectionModal(props: Props) {
<Row gutter={16} justify="center" className="py-5">
<Col span={12}>
<div
className="flex flex-col items-center justify-center hover:bg-indigo-50 border rounded-lg shadow-sm cursor-pointer gap-3"
className="flex flex-col items-center justify-center hover:bg-indigo-lightest border rounded-lg shadow-sm cursor-pointer gap-3"
style={{ height: '80px' }}
onClick={() => onClick(true)}
>
@ -57,7 +57,7 @@ function AddCardSelectionModal(props: Props) {
</Col>
<Col span={12}>
<div
className="flex flex-col items-center justify-center hover:bg-indigo-50 border rounded-lg shadow-sm cursor-pointer gap-3"
className="flex flex-col items-center justify-center hover:bg-indigo-lightest border rounded-lg shadow-sm cursor-pointer gap-3"
style={{ height: '80px' }}
onClick={() => onClick(false)}
>

View file

@ -123,7 +123,7 @@ function AlertListItem(props: Props) {
<div className="col-span-2">
<div className="flex items-center">
<Tag
className="rounded-full bg-indigo-50 cap-first text-base"
className="rounded-full bg-indigo-lightest cap-first text-base"
bordered={false}
>
{alert.detectionMethod}

View file

@ -15,7 +15,7 @@ function CardIssueItem(props: Props) {
title={issue.name}
description={<div className="text-nowrap truncate">{issue.source}</div>}
avatar={<Icon name={issue.icon} size="24" />}
className="cursor-pointer hover:bg-indigo-50"
className="cursor-pointer hover:bg-indigo-lightest"
/>
<div>{issue.sessionCount}</div>
</>

View file

@ -60,7 +60,7 @@ function CardsLibrary(props: Props) {
onClick={(e) => onItemClick(e, metric.metricId)}
/>
<Card
className="border border-transparent hover:border-indigo-50 hover:shadow-sm rounded-lg"
className="border border-transparent hover:border-indigo-lightest hover:shadow-sm rounded-lg"
style={{
border: selectedList.includes(metric.metricId)
? '1px solid #1890ff'

View file

@ -1,195 +0,0 @@
import React, { useEffect } from 'react';
import { useObserver } from 'mobx-react-lite';
import { Icon, Loader } from 'UI';
import cn from 'classnames';
import { useStore } from 'App/mstore';
import WidgetWrapper from '../WidgetWrapper';
import { useTranslation } from 'react-i18next';
interface IWiProps {
category: Record<string, any>;
onClick: (category: Record<string, any>) => void;
isSelected: boolean;
selectedWidgetIds: string[];
}
const ICONS: Record<string, string | null> = {
errors: 'errors-icon',
performance: 'performance-icon',
resources: 'resources-icon',
overview: null,
custom: null,
};
export function WidgetCategoryItem({
category,
isSelected,
onClick,
selectedWidgetIds,
}: IWiProps) {
const selectedCategoryWidgetsCount = useObserver(
() =>
category.widgets.filter((widget: any) =>
selectedWidgetIds.includes(widget.metricId),
).length,
);
return (
<div
className={cn('rounded p-4 border cursor-pointer hover:bg-active-blue', {
'bg-active-blue border-blue': isSelected,
'bg-white': !isSelected,
})}
onClick={() => onClick(category)}
>
<div className="font-medium text-lg mb-2 capitalize flex items-center">
{/* @ts-ignore */}
{ICONS[category.name] && (
<Icon name={ICONS[category.name]} size={18} className="mr-2" />
)}
{category.name}
</div>
<div className="mb-2 text-sm leading-tight">{category.description}</div>
{selectedCategoryWidgetsCount > 0 && (
<div className="flex items-center">
<span className="color-gray-medium text-sm">{`Selected ${selectedCategoryWidgetsCount} of ${category.widgets.length}`}</span>
</div>
)}
</div>
);
}
interface IProps {
handleCreateNew?: () => void;
isDashboardExists?: boolean;
}
function DashboardMetricSelection(props: IProps) {
const { t } = useTranslation();
const { dashboardStore } = useStore();
const widgetCategories: any[] = useObserver(
() => dashboardStore.widgetCategories,
);
const loadingTemplates = useObserver(() => dashboardStore.loadingTemplates);
const [activeCategory, setActiveCategory] = React.useState<any>();
const [selectAllCheck, setSelectAllCheck] = React.useState(false);
const selectedWidgetIds = useObserver(() =>
dashboardStore.selectedWidgets.map((widget: any) => widget.metricId),
);
const scrollContainer = React.useRef<HTMLDivElement>(null);
useEffect(() => {
dashboardStore?.fetchTemplates(true).then((categories) => {
setActiveCategory(categories[0]);
});
}, []);
useEffect(() => {
if (scrollContainer.current) {
scrollContainer.current.scrollTop = 0;
}
}, [activeCategory, scrollContainer.current]);
const handleWidgetCategoryClick = (category: any) => {
setActiveCategory(category);
setSelectAllCheck(false);
};
const toggleAllWidgets = ({ target: { checked } }) => {
setSelectAllCheck(checked);
if (checked) {
dashboardStore.selectWidgetsByCategory(activeCategory.name);
} else {
dashboardStore.removeSelectedWidgetByCategory(activeCategory);
}
};
return useObserver(() => (
<Loader loading={loadingTemplates}>
<div className="grid grid-cols-12 gap-4 my-3 items-end">
<div className="col-span-3">
<div className="uppercase color-gray-medium text-lg">{t('Type')}</div>
</div>
<div className="col-span-9 flex items-center">
{activeCategory && (
<>
<div className="flex items-baseline">
<h2 className="text-2xl capitalize">{activeCategory.name}</h2>
<span className="text-2xl color-gray-medium ml-2">
{activeCategory.widgets.length}
</span>
</div>
<div className="ml-auto">
<label className="flex items-center ml-3 cursor-pointer select-none">
<input
type="checkbox"
onChange={toggleAllWidgets}
checked={selectAllCheck}
/>
<div className="ml-2">{t('Select All')}</div>
</label>
</div>
</>
)}
</div>
</div>
<div className="grid grid-cols-12 gap-4">
<div className="col-span-3">
<div
className="grid grid-cols-1 gap-4 py-1 pr-2"
style={{
maxHeight: `calc(100vh - ${props.isDashboardExists ? 175 : 300}px)`,
overflowY: 'auto',
}}
>
{activeCategory &&
widgetCategories.map((category, index) => (
<WidgetCategoryItem
key={category.name}
onClick={handleWidgetCategoryClick}
category={category}
isSelected={activeCategory.name === category.name}
selectedWidgetIds={selectedWidgetIds}
/>
))}
</div>
</div>
<div className="col-span-9">
<div
className="grid grid-cols-4 gap-4 -mx-4 px-4 pb-40 items-start py-1"
style={{ maxHeight: 'calc(100vh - 170px)', overflowY: 'auto' }}
ref={scrollContainer}
>
{activeCategory &&
activeCategory.widgets.map((widget: any) => (
<WidgetWrapper
key={widget.metricId}
widget={widget}
active={selectedWidgetIds.includes(widget.metricId)}
isTemplate
isSaved={widget.metricType === 'predefined'}
onClick={() => dashboardStore.toggleWidgetSelection(widget)}
/>
))}
{props.isDashboardExists && activeCategory?.name === 'custom' && (
<div
className={cn(
'relative rounded border col-span-1 cursor-pointer',
'flex flex-col items-center justify-center bg-white',
'hover:bg-active-blue hover:shadow-border-main text-center py-16',
)}
onClick={props.handleCreateNew}
>
<Icon name="plus" size="16" />
<span className="mt-2">{t('Create Metric')}</span>
</div>
)}
</div>
</div>
</div>
</Loader>
));
}
export default DashboardMetricSelection;

View file

@ -1 +0,0 @@
export { default } from './DashboardMetricSelection';

View file

@ -1,109 +0,0 @@
import React from 'react';
import { useObserver } from 'mobx-react-lite';
import { Button } from 'antd';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { useStore } from 'App/mstore';
import { useModal } from 'App/components/Modal';
import { dashboardMetricCreate, withSiteId } from 'App/routes';
import DashboardForm from '../DashboardForm';
import DashboardMetricSelection from '../DashboardMetricSelection';
import { useTranslation } from 'react-i18next';
import { PANEL_SIZES } from 'App/constants/panelSizes'
interface Props extends RouteComponentProps {
history: any;
siteId?: string;
dashboardId?: string;
onMetricAdd?: () => void;
}
function DashboardModal(props: Props) {
const { t } = useTranslation();
const { history, siteId, dashboardId } = props;
const { dashboardStore } = useStore();
const selectedWidgetsCount = useObserver(
() => dashboardStore.selectedWidgets.length,
);
const { hideModal } = useModal();
const dashboard = useObserver(() => dashboardStore.dashboardInstance);
const loading = useObserver(() => dashboardStore.isSaving);
const onSave = () => {
dashboardStore
.save(dashboard)
.then(async (syncedDashboard) => {
if (dashboard.exists()) {
await dashboardStore.fetch(dashboard.dashboardId);
}
dashboardStore.selectDashboardById(syncedDashboard.dashboardId);
history.push(
withSiteId(`/dashboard/${syncedDashboard.dashboardId}`, siteId),
);
})
.then(hideModal);
};
const handleCreateNew = () => {
const path = withSiteId(dashboardMetricCreate(dashboardId), siteId);
props.onMetricAdd();
history.push(path);
hideModal();
};
const isDashboardExists = dashboard.exists();
return useObserver(() => (
<div style={{ maxWidth: '85vw' }}>
<div
className="border-r shadow p-4 h-screen"
style={{
backgroundColor: '#FAFAFA',
zIndex: 999,
width: '100%',
maxWidth: PANEL_SIZES.maxWidth,
}}
>
<div className="mb-6 flex items-end justify-between">
<div>
<h1 className="text-2xl">
{isDashboardExists
? t('Add metrics to dashboard')
: t('Create Dashboard')}
</h1>
</div>
<div>
<span className="text-md">{t('Past 7 days data')}</span>
</div>
</div>
{!isDashboardExists && (
<>
<DashboardForm />
<p>
{t(
'Create new dashboard by choosing from the range of predefined metrics that you care about. You can always add your custom metrics later.',
)}
</p>
</>
)}
<DashboardMetricSelection
handleCreateNew={handleCreateNew}
isDashboardExists={isDashboardExists}
/>
<div className="flex items-center absolute bottom-0 left-0 right-0 bg-white border-t p-3">
<Button
type="primary"
disabled={!dashboard.isValid || loading}
onClick={onSave}
className="flaot-left mr-2"
>
{isDashboardExists ? t('Add Selected to Dashboard') : t('Create')}
</Button>
<span className="ml-2 color-gray-medium">
{selectedWidgetsCount}&nbsp;{t('Metrics')}
</span>
</div>
</div>
</div>
));
}
export default withRouter(DashboardModal);

View file

@ -1 +0,0 @@
export { default } from './DashboardModal';

View file

@ -11,7 +11,6 @@ import withPageTitle from 'HOCs/withPageTitle';
import withReport from 'App/components/hocs/withReport';
import { useHistory } from 'react-router';
import DashboardHeader from '../DashboardHeader';
import DashboardModal from '../DashboardModal';
import DashboardWidgetGrid from '../DashboardWidgetGrid';
import AiQuery from './AiQuery';
import { PANEL_SIZES } from 'App/constants/panelSizes'
@ -69,15 +68,18 @@ function DashboardView(props: Props) {
onAddWidgets();
trimQuery();
}
dashboardStore.resetDensity();
return () => dashboardStore.resetSelectedDashboard();
}, []);
useEffect(() => {
const isExists = dashboardStore.getDashboardById(dashboardId);
if (!isExists) {
history.push(withSiteId('/dashboard', siteId));
}
const isExists = async () => dashboardStore.getDashboardById(dashboardId);
isExists().then((res) => {
if (!res) {
history.push(withSiteId('/dashboard', siteId));
}
})
}, [dashboardId]);
useEffect(() => {
@ -85,18 +87,6 @@ function DashboardView(props: Props) {
dashboardStore.fetch(dashboard.dashboardId);
}, [dashboard]);
const onAddWidgets = () => {
dashboardStore.initDashboard(dashboard);
showModal(
<DashboardModal
siteId={siteId}
onMetricAdd={pushQuery}
dashboardId={dashboardId}
/>,
{ right: true },
);
};
if (!dashboard) return null;
const originStr = window.env.ORIGIN || window.location.origin;
@ -117,7 +107,6 @@ function DashboardView(props: Props) {
<DashboardWidgetGrid
siteId={siteId}
dashboardId={dashboardId}
onEditHandler={onAddWidgets}
id="report"
/>
</div>

View file

@ -1,129 +0,0 @@
import React from 'react';
import { observer } from 'mobx-react-lite';
import { Loader } from 'UI';
import { Button } from 'antd';
import WidgetWrapper from 'App/components/Dashboard/components/WidgetWrapper';
import { useStore } from 'App/mstore';
import { useModal } from 'App/components/Modal';
import { dashboardMetricCreate, withSiteId } from 'App/routes';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
interface IProps extends RouteComponentProps {
siteId: string;
title: string;
description: string;
}
function AddMetric({ history, siteId, title, description }: IProps) {
const { t } = useTranslation();
const [metrics, setMetrics] = React.useState<Record<string, any>[]>([]);
const { dashboardStore } = useStore();
const { hideModal } = useModal();
React.useEffect(() => {
dashboardStore?.fetchTemplates(true).then((cats: any[]) => {
const customMetrics =
cats.find((category) => category.name === 'custom')?.widgets || [];
setMetrics(customMetrics);
});
}, []);
const dashboard = dashboardStore.selectedDashboard;
const selectedWidgetIds = dashboardStore.selectedWidgets.map(
(widget: any) => widget.metricId,
);
const queryParams = new URLSearchParams(location.search);
const onSave = () => {
if (selectedWidgetIds.length === 0) return;
dashboardStore
.save(dashboard)
.then(async (syncedDashboard: Record<string, any>) => {
if (dashboard.exists()) {
await dashboardStore.fetch(dashboard.dashboardId);
}
dashboardStore.selectDashboardById(syncedDashboard.dashboardId);
})
.then(hideModal);
};
const onCreateNew = () => {
const path = withSiteId(
dashboardMetricCreate(dashboard.dashboardId),
siteId,
);
if (!queryParams.has('modal')) history.push('?modal=addMetric');
history.push(path);
hideModal();
};
return (
<div style={{ maxWidth: '85vw', width: 1200 }}>
<div
className="border-l shadow h-screen"
style={{ backgroundColor: '#FAFAFA', zIndex: 999, width: '100%' }}
>
<div className="py-6 px-8 flex items-start justify-between">
<div className="flex flex-col">
<h1 className="text-2xl">{title}</h1>
<div className="text-disabled-text">{description}</div>
</div>
<Button
variant="text"
className="text-main font-medium ml-2"
onClick={onCreateNew}
>
+&nbsp;{t('Create New')}
</Button>
</div>
<Loader loading={dashboardStore.loadingTemplates}>
<div
className="grid h-full grid-cols-4 gap-4 px-8 items-start py-1"
style={{
maxHeight: 'calc(100vh - 160px)',
overflowY: 'auto',
gridAutoRows: 'max-content',
}}
>
{metrics ? (
metrics.map((metric: any) => (
<WidgetWrapper
key={metric.metricId}
widget={metric}
active={selectedWidgetIds.includes(metric.metricId)}
isTemplate
isSaved={metric.metricType === 'predefined'}
onClick={() => dashboardStore.toggleWidgetSelection(metric)}
/>
))
) : (
<div>{t('No custom metrics created.')}</div>
)}
</div>
</Loader>
<div className="py-4 border-t px-8 bg-white w-full flex items-center justify-between">
<div>
{t('Selected')}
<span className="font-medium">{selectedWidgetIds.length}</span>
&nbsp;{t('out of')}&nbsp;
<span className="font-medium">{metrics ? metrics.length : 0}</span>
</div>
<Button
type="primary"
disabled={selectedWidgetIds.length === 0}
onClick={onSave}
>
{t('Add Selected')}
</Button>
</div>
</div>
</div>
);
}
export default withRouter(observer(AddMetric));

View file

@ -1,138 +0,0 @@
import React from 'react';
import { observer } from 'mobx-react-lite';
import { Icon } from 'UI';
import { useModal } from 'App/components/Modal';
import { useStore } from 'App/mstore';
import cn from 'classnames';
import AddMetric from './AddMetric';
import AddPredefinedMetric from './AddPredefinedMetric';
interface AddMetricButtonProps {
iconName: 'bar-pencil' | 'grid-check';
title: string;
description: string;
isPremade?: boolean;
isPopup?: boolean;
onClick: () => void;
}
function AddMetricButton({
iconName,
title,
description,
onClick,
isPremade,
isPopup,
}: AddMetricButtonProps) {
return (
<div
onClick={onClick}
className={cn(
'flex items-center hover:bg-gray-lightest group rounded border cursor-pointer',
isPremade
? 'bg-figmaColors-primary-outlined-hover-background hover:!border-tealx'
: 'hover:!border-teal bg-figmaColors-secondary-outlined-hover-background',
isPopup ? 'p-4 z-50' : 'px-4 py-8 flex-col',
)}
style={{ borderColor: 'rgb(238, 238, 238)' }}
>
<div
className={cn(
'p-6 my-3 rounded-full group-hover:bg-gray-light',
isPremade
? 'bg-figmaColors-primary-outlined-hover-background fill-figmaColors-accent-secondary group-hover:!bg-figmaColors-accent-secondary group-hover:!fill-white'
: 'bg-figmaColors-secondary-outlined-hover-background fill-figmaColors-secondary-outlined-resting-border group-hover:!bg-teal group-hover:!fill-white',
)}
>
<Icon name={iconName} size={26} style={{ fill: 'inherit' }} />
</div>
<div
className={
isPopup
? 'flex flex-col text-left ml-4'
: 'flex flex-col text-center items-center'
}
>
<div className="font-bold text-base text-figmaColors-text-primary">
{title}
</div>
<div
className={cn(
'text-disabled-test text-figmaColors-text-primary text-base',
isPopup ? 'w-full' : 'mt-2 w-2/3 text-center',
)}
>
{description}
</div>
</div>
</div>
);
}
interface Props {
siteId: string;
isPopup?: boolean;
onAction?: () => void;
}
function AddMetricContainer({ siteId, isPopup, onAction }: Props) {
const { showModal } = useModal();
const { dashboardStore } = useStore();
const onAddCustomMetrics = () => {
onAction?.();
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
<AddMetric
siteId={siteId}
title="Custom Metrics"
description="Metrics that are manually created by you or your team."
/>,
{ right: true },
);
};
const onAddPredefinedMetrics = () => {
onAction?.();
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
<AddPredefinedMetric
siteId={siteId}
title="Ready-Made Metrics"
description="Curated metrics predfined by OpenReplay."
/>,
{ right: true },
);
};
const classes = isPopup
? 'bg-white border rounded p-4 grid grid-rows-2 gap-4'
: 'bg-white border border-dashed hover:!border-gray-medium rounded p-8 grid grid-cols-2 gap-8';
return (
<div
style={{
borderColor: 'rgb(238, 238, 238)',
height: isPopup ? undefined : 300,
}}
className={classes}
>
<AddMetricButton
title="+ Add Custom Metric"
description="Metrics that are manually created by you or your team"
iconName="bar-pencil"
onClick={onAddCustomMetrics}
isPremade
isPopup={isPopup}
/>
<AddMetricButton
title="+ Add Ready-Made Metric"
description="Curated metrics predfined by OpenReplay."
iconName="grid-check"
onClick={onAddPredefinedMetrics}
isPopup={isPopup}
/>
</div>
);
}
export default observer(AddMetricContainer);

View file

@ -7,7 +7,7 @@ import { useStore } from 'App/mstore';
import { useModal } from 'App/components/Modal';
import { dashboardMetricCreate, withSiteId } from 'App/routes';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { WidgetCategoryItem } from 'App/components/Dashboard/components/DashboardMetricSelection/DashboardMetricSelection';
import { WidgetCategoryItem } from 'App/components/Dashboard/components/WidgetCategoryItem';
import { useTranslation } from 'react-i18next';
interface IProps extends RouteComponentProps {

View file

@ -12,7 +12,6 @@ import { useTranslation } from 'react-i18next';
interface Props {
siteId: string;
dashboardId: string;
onEditHandler: () => void;
id?: string;
}

View file

@ -56,7 +56,7 @@ function ExcludeFilters(props: Props) {
))}
</div>
) : (
<Button type="link" onClick={addPageFilter}>
<Button type="link" onClick={addPageFilter} className="!text-black">
{t('Add Exclusion')}
</Button>
)}

View file

@ -21,7 +21,7 @@ function FunnelIssuesSelectedFilters(props: Props) {
key={index}
closable
onClose={() => removeSelectedValue(option.value)}
className="select-none rounded-lg text-base gap-1 bg-indigo-50 flex items-center"
className="select-none rounded-lg text-base gap-1 bg-indigo-lightest flex items-center"
>
{option.label}
</Tag>

View file

@ -1,37 +0,0 @@
import React from 'react';
import WidgetWrapper from 'App/components/Dashboard/components/WidgetWrapper';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { withSiteId } from 'App/routes';
interface Props extends RouteComponentProps {
list: any;
siteId: any;
selectedList: any;
}
function GridView(props: Props) {
const { siteId, list, selectedList, history } = props;
const onItemClick = (metricId: number) => {
const path = withSiteId(`/metrics/${metricId}`, siteId);
history.push(path);
};
return (
<div className="grid grid-cols-4 gap-4 m-4 items-start">
{list.map((metric: any) => (
<React.Fragment key={metric.metricId}>
<WidgetWrapper
key={metric.metricId}
widget={metric}
isGridView
active={selectedList.includes(metric.metricId)}
isSaved
onClick={() => onItemClick(parseInt(metric.metricId))}
/>
</React.Fragment>
))}
</div>
);
}
export default withRouter(GridView);

View file

@ -0,0 +1,56 @@
import React from 'react';
import { useObserver } from 'mobx-react-lite';
import { Icon } from 'UI';
import cn from 'classnames';
interface IWiProps {
category: Record<string, any>;
onClick: (category: Record<string, any>) => void;
isSelected: boolean;
selectedWidgetIds: string[];
}
const ICONS: Record<string, string | null> = {
errors: 'errors-icon',
performance: 'performance-icon',
resources: 'resources-icon',
overview: null,
custom: null,
};
export function WidgetCategoryItem({
category,
isSelected,
onClick,
selectedWidgetIds,
}: IWiProps) {
const selectedCategoryWidgetsCount = useObserver(
() =>
category.widgets.filter((widget: any) =>
selectedWidgetIds.includes(widget.metricId),
).length,
);
return (
<div
className={cn('rounded p-4 border cursor-pointer hover:bg-active-blue', {
'bg-active-blue border-blue': isSelected,
'bg-white': !isSelected,
})}
onClick={() => onClick(category)}
>
<div className="font-medium text-lg mb-2 capitalize flex items-center">
{/* @ts-ignore */}
{ICONS[category.name] && (
<Icon name={ICONS[category.name]} size={18} className="mr-2" />
)}
{category.name}
</div>
<div className="mb-2 text-sm leading-tight">{category.description}</div>
{selectedCategoryWidgetsCount > 0 && (
<div className="flex items-center">
<span className="color-gray-medium text-sm">{`Selected ${selectedCategoryWidgetsCount} of ${category.widgets.length}`}</span>
</div>
)}
</div>
);
}

View file

@ -0,0 +1 @@
export { WidgetCategoryItem } from './WidgetCategoryItem';

View file

@ -44,6 +44,7 @@ interface Props {
isSaved?: boolean;
isTemplate?: boolean;
isPreview?: boolean;
height?: number;
}
function WidgetChart(props: Props) {
@ -52,10 +53,10 @@ function WidgetChart(props: Props) {
triggerOnce: true,
rootMargin: '200px 0px',
});
const { isSaved = false, metric, isTemplate } = props;
const { isSaved = false, metric, isTemplate, height } = props;
const { dashboardStore, metricStore } = useStore();
const _metric: any = props.isPreview ? metricStore.instance : props.metric;
const { data } = _metric;
const _metric: any = props.metric;
const data = _metric.data;
const { period } = dashboardStore;
const { drillDownPeriod } = dashboardStore;
const { drillDownFilter } = dashboardStore;
@ -158,7 +159,7 @@ function WidgetChart(props: Props) {
}, 4000);
dashboardStore
.fetchMetricChartData(metric, payload, isSaved, period, isComparison)
.then((res: any) => {
.then((res) => {
if (isComparison) setCompData(res);
clearTimeout(tm);
setStale(false);
@ -181,10 +182,10 @@ function WidgetChart(props: Props) {
}
prevMetricRef.current = _metric;
const timestmaps = drillDownPeriod.toTimestamps();
const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
const density = dashboardStore.selectedDensity;
const payload = isSaved
? { ...metricParams, density }
: { ...params, ...timestmaps, ..._metric.toJson(), density };
? { ...metricParams, density }
: { ...params, ...timestmaps, ..._metric.toJson(), density };
debounceRequest(
_metric,
payload,
@ -283,6 +284,7 @@ function WidgetChart(props: Props) {
hideLegend
onClick={onChartClick}
label={t('Conversion')}
height={height}
/>
);
}
@ -293,6 +295,7 @@ function WidgetChart(props: Props) {
data={data}
compData={compData}
isWidget={isSaved || isTemplate}
height={height}
/>
);
}
@ -308,6 +311,7 @@ function WidgetChart(props: Props) {
metric={defaultMetric}
data={data}
predefinedKey={_metric.metricOf}
height={height}
/>
);
}
@ -331,6 +335,7 @@ function WidgetChart(props: Props) {
compData={compDataCopy}
onSeriesFocus={onFocus}
onClick={onChartClick}
height={height}
label={
_metric.metricOf === 'sessionCount'
? t('Number of Sessions')
@ -360,6 +365,7 @@ function WidgetChart(props: Props) {
return (
<BarChart
inGrid={!props.isPreview}
height={height}
data={chartData}
compData={compDataCopy}
params={params}
@ -378,6 +384,7 @@ function WidgetChart(props: Props) {
if (viewType === 'progressChart') {
return (
<ColumnChart
height={height}
inGrid={!props.isPreview}
horizontal
data={chartData}
@ -396,6 +403,7 @@ function WidgetChart(props: Props) {
if (viewType === 'pieChart') {
return (
<PieChart
height={height}
inGrid={!props.isPreview}
data={chartData}
onSeriesFocus={onFocus}
@ -412,6 +420,7 @@ function WidgetChart(props: Props) {
<CustomMetricPercentage
inGrid={!props.isPreview}
data={data[0]}
height={height}
colors={colors}
params={params}
label={
@ -451,6 +460,7 @@ function WidgetChart(props: Props) {
return (
<BugNumChart
values={values}
height={height}
inGrid={!props.isPreview}
colors={colors}
onSeriesFocus={onFocus}
@ -470,6 +480,7 @@ function WidgetChart(props: Props) {
<CustomMetricTableSessions
metric={_metric}
data={data}
height={height}
isTemplate={isTemplate}
isEdit={!isSaved && !isTemplate}
/>
@ -480,6 +491,7 @@ function WidgetChart(props: Props) {
<CustomMetricTableErrors
metric={_metric}
data={data}
height={height}
// isTemplate={isTemplate}
isEdit={!isSaved && !isTemplate}
/>
@ -490,6 +502,7 @@ function WidgetChart(props: Props) {
<SessionsBy
metric={_metric}
data={data}
height={height}
onClick={onChartClick}
isTemplate={isTemplate}
/>
@ -518,18 +531,18 @@ function WidgetChart(props: Props) {
</div>
);
}
return <ClickMapCard />;
return <ClickMapCard height={height} />;
}
if (metricType === INSIGHTS) {
return <InsightsCard data={data} />;
return <InsightsCard height={height} data={data} />;
}
if (metricType === USER_PATH && data && data.links) {
const isUngrouped = props.isPreview
? !(_metric.hideExcess ?? true)
: false;
const height = props.isPreview ? 550 : 240;
const height = props.height ? props.height : props.isPreview ? 550 : 240;
return (
<SankeyChart
height={height}
@ -548,6 +561,7 @@ function WidgetChart(props: Props) {
if (viewType === 'trend') {
return (
<LineChart
height={height}
data={data}
colors={colors}
params={params}
@ -561,7 +575,7 @@ function WidgetChart(props: Props) {
}
console.log('Unknown metric type', metricType);
return <div>{t('Unknown metric type')}</div>;
}, [data, compData, enabledRows, _metric]);
}, [data, compData, enabledRows, _metric, data]);
const showTable =
_metric.metricType === TIMESERIES &&

View file

@ -6,7 +6,6 @@ import { Space } from 'antd';
import { CUSTOM_RANGE, DATE_RANGE_COMPARISON_OPTIONS } from 'App/dateRange';
import Period from 'Types/app/period';
import RangeGranularity from './RangeGranularity';
import { useTranslation } from 'react-i18next';
function WidgetDateRange({
label = 'Time Range',

View file

@ -1,13 +1,12 @@
import React, { useRef, lazy } from 'react';
import cn from 'classnames';
import { ItemMenu, TextEllipsis } from 'UI';
import { TextEllipsis } from 'UI';
import { useDrag, useDrop } from 'react-dnd';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { withSiteId, dashboardMetricDetails } from 'App/routes';
import { FilterKey } from 'App/types/filter/filterType';
import { TIMESERIES } from 'App/constants/card';
import TemplateOverlay from './TemplateOverlay';
const WidgetChart = lazy(
@ -45,7 +44,6 @@ function WidgetWrapper(props: Props & RouteComponentProps) {
isGridView = false,
} = props;
const { widget } = props;
const isTimeSeries = widget.metricType === TIMESERIES;
const isPredefined = widget.metricType === 'predefined';
const dashboard = dashboardStore.selectedDashboard;
@ -73,13 +71,6 @@ function WidgetWrapper(props: Props & RouteComponentProps) {
}),
});
const onDelete = async () => {
dashboardStore.deleteDashboardWidget(
dashboard?.dashboardId!,
widget.widgetId,
);
};
const onChartClick = () => {
if (!isSaved || isPredefined) return;

View file

@ -38,7 +38,7 @@ interface Props {
isSaved?: boolean;
}
function WidgetWrapperNew(props: Props & RouteComponentProps) {
function WidgetWrapperDashboard(props: Props & RouteComponentProps) {
const { dashboardStore, metricStore } = useStore();
const {
isWidget = false,
@ -178,4 +178,4 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
);
}
export default withRouter(observer(WidgetWrapperNew));
export default withRouter(observer(WidgetWrapperDashboard));

View file

@ -1,11 +1,10 @@
import React from 'react';
import { Table, Tooltip } from 'antd';
import { Table, Dropdown } from 'antd';
import type { TableProps } from 'antd';
import Widget from 'App/mstore/types/widget';
import Funnel from 'App/mstore/types/funnel';
import { ItemMenu } from 'UI';
import { EllipsisVertical } from 'lucide-react';
import { exportAntCsv } from '../../../utils';
import { exportAntCsv } from 'App/utils';
import { useTranslation } from 'react-i18next';
interface Props {
@ -111,19 +110,20 @@ export function TableExporter({
const { t } = useTranslation();
const onClick = () => exportAntCsv(tableColumns, tableData, filename);
return (
<Tooltip title={t('Export Data to CSV')}>
<div className={`absolute ${top || 'top-0'} ${right || '-right-1'}`}>
<ItemMenu
items={[{ icon: 'download', text: 'Export to CSV', onClick }]}
bold
customTrigger={
<div className="flex items-center justify-center bg-gradient-to-r from-[#fafafa] to-neutral-200 cursor-pointer rounded-lg h-[38px] w-[38px] btn-export-table-data">
<EllipsisVertical size={16} />
</div>
}
/>
</div>
</Tooltip>
<div
className={`absolute ${top || 'top-0'} ${right || '-right-1'}`}
style={{ zIndex: 10 }}
>
<Dropdown
menu={{
items: [{ key: 'download', label: 'Export to CSV', onClick }],
}}
>
<div className="flex items-center justify-center bg-gray-lighter cursor-pointer rounded-lg h-[38px] w-[38px] btn-export-table-data">
<EllipsisVertical size={16} />
</div>
</Dropdown>
</div>
);
}

View file

@ -1,21 +1,22 @@
import React from 'react';
import { useModal } from 'App/components/Modal';
import { MessagesSquare, Trash } from 'lucide-react';
import ChatHeader from './components/ChatHeader';
import { PANEL_SIZES } from 'App/constants/panelSizes';
import ChatLog from './components/ChatLog';
import IntroSection from './components/IntroSection';
import { useQuery } from '@tanstack/react-query';
import { kaiService } from 'App/services';
import { toast } from 'react-toastify';
import { useStore } from 'App/mstore';
import { observer } from 'mobx-react-lite';
import { useHistory, useLocation } from 'react-router-dom';
import ChatsModal from './components/ChatsModal';
import { kaiStore } from './KaiStore';
function KaiChat() {
const { userStore, projectsStore } = useStore();
const history = useHistory();
const [chatTitle, setTitle] = React.useState<string | null>(null);
const chatTitle = kaiStore.chatTitle;
const setTitle = kaiStore.setTitle;
const userId = userStore.account.id;
const userLetter = userStore.account.name[0].toUpperCase();
const { activeSiteId } = projectsStore;
@ -99,7 +100,9 @@ function KaiChat() {
};
return (
<div className="w-full mx-auto" style={{ maxWidth: PANEL_SIZES.maxWidth }}>
<div className={'w-full rounded-lg overflow-hidden border shadow'}>
<div
className={'w-full rounded-lg overflow-hidden border shadow relative'}
>
<ChatHeader
chatTitle={chatTitle}
openChats={openChats}
@ -122,7 +125,7 @@ function KaiChat() {
threadId={threadId}
projectId={activeSiteId}
userLetter={userLetter}
onTitleChange={setTitle}
chatTitle={chatTitle}
initialMsg={initialMsg}
setInitialMsg={setInitialMsg}
/>
@ -133,69 +136,4 @@ function KaiChat() {
);
}
function ChatsModal({
onSelect,
projectId,
}: {
onSelect: (threadId: string, title: string) => void;
projectId: string;
}) {
const {
data = [],
isPending,
refetch,
} = useQuery({
queryKey: ['kai', 'chats', projectId],
queryFn: () => kaiService.getKaiChats(projectId),
staleTime: 1000 * 60,
});
const onDelete = async (id: string) => {
try {
await kaiService.deleteKaiChat(projectId, id);
} catch (e) {
toast.error("Something wen't wrong. Please try again later.");
}
refetch();
};
return (
<div className={'h-screen w-full flex flex-col gap-2 p-4'}>
<div className={'flex items-center font-semibold text-lg gap-2'}>
<MessagesSquare size={16} />
<span>Chats</span>
</div>
{isPending ? (
<div className="animate-pulse text-disabled-text">Loading chats...</div>
) : (
<div className="flex flex-col overflow-y-auto -mx-4 px-4">
{data.map((chat) => (
<div
key={chat.thread_id}
className="flex items-center relative group min-h-8"
>
<div
style={{ width: 270 - 28 - 4 }}
className="rounded-l pl-2 h-full w-full hover:bg-active-blue flex items-center"
>
<div
onClick={() => onSelect(chat.thread_id, chat.title)}
className="cursor-pointer hover:underline truncate"
>
{chat.title}
</div>
</div>
<div
onClick={() => onDelete(chat.thread_id)}
className="cursor-pointer opacity-0 group-hover:opacity-100 rounded-r h-full px-2 flex items-center group-hover:bg-active-blue"
>
<Trash size={14} className="text-disabled-text" />
</div>
</div>
))}
</div>
)}
</div>
);
}
export default observer(KaiChat);

View file

@ -3,7 +3,7 @@ import AiService from '@/services/AiService';
export default class KaiService extends AiService {
getKaiChats = async (
projectId: string,
): Promise<{ title: string; thread_id: string }[]> => {
): Promise<{ title: string; thread_id: string; datetime: string }[]> => {
const r = await this.client.get(`/kai/${projectId}/chats`);
if (!r.ok) {
throw new Error('Failed to fetch chats');
@ -26,15 +26,19 @@ export default class KaiService extends AiService {
getKaiChat = async (
projectId: string,
threadId: string,
): Promise<
{
): Promise<{
messages: {
role: string;
content: string;
message_id: any;
duration?: number;
duration: number;
feedback: boolean | null;
}[]
> => {
supports_visualization: boolean;
chart: string;
chart_data: string;
}[];
title: string;
}> => {
const r = await this.client.get(`/kai/${projectId}/chats/${threadId}`);
if (!r.ok) {
throw new Error('Failed to fetch chat');
@ -77,4 +81,46 @@ export default class KaiService extends AiService {
const data = await r.json();
return data;
};
getMsgChart = async (
messageId: string,
projectId: string,
): Promise<string> => {
const r = await this.client.get(
`/kai/${projectId}/chats/data/${messageId}`,
);
if (!r.ok) {
throw new Error('Failed to fetch chart data');
}
const data = await r.json();
return data;
};
saveChartData = async (
messageId: string,
projectId: string,
chartData: any,
) => {
const r = await this.client.post(
`/kai/${projectId}/chats/data/${messageId}`,
{
chart_data: JSON.stringify(chartData),
},
);
if (!r.ok) {
throw new Error('Failed to save chart data');
}
const data = await r.json();
return data;
};
checkUsage = async (): Promise<{ total: number; used: number }> => {
const r = await this.client.get(`/kai/usage`);
if (!r.ok) {
throw new Error('Failed to fetch usage');
}
const data = await r.json();
return data;
};
}

View file

@ -1,18 +1,46 @@
import { makeAutoObservable, runInAction } from 'mobx';
import { BotChunk, ChatManager, Message } from './SocketManager';
import { BotChunk, ChatManager } from './SocketManager';
import { kaiService as aiService, kaiService } from 'App/services';
import { toast } from 'react-toastify';
import Widget from 'App/mstore/types/widget';
export interface Message {
text: string;
isUser: boolean;
messageId: string;
/** filters to get chart */
chart: string;
/** chart data */
chart_data: string;
supports_visualization: boolean;
feedback: boolean | null;
duration: number;
}
export interface SentMessage
extends Omit<
Message,
'duration' | 'feedback' | 'chart' | 'supports_visualization'
> {
replace: boolean;
}
class KaiStore {
chatManager: ChatManager | null = null;
processingStage: BotChunk | null = null;
messages: Message[] = [];
messages: Array<Message> = [];
queryText = '';
chatTitle: string | null = null;
loadingChat = false;
replacing = false;
replacing: string | null = null;
usage = {
total: 0,
used: 0,
percent: 0,
};
constructor() {
makeAutoObservable(this);
this.checkUsage();
}
get lastHumanMessage() {
@ -29,6 +57,20 @@ class KaiStore {
return { msg, index };
}
get firstHumanMessage() {
let msg = null;
let index = null;
for (let i = 0; i < this.messages.length; i++) {
const message = this.messages[i];
if (message.isUser) {
msg = message;
index = i;
break;
}
}
return { msg, index };
}
get lastKaiMessage() {
let msg = null;
let index = null;
@ -43,6 +85,14 @@ class KaiStore {
return { msg, index };
}
getPreviousMessage = (messageId: string) => {
const index = this.messages.findIndex((msg) => msg.messageId === messageId);
if (index > 0) {
return this.messages[index - 1];
}
return null;
};
setQueryText = (text: string) => {
this.queryText = text;
};
@ -67,9 +117,9 @@ class KaiStore {
this.messages.push(message);
};
editMessage = (text: string) => {
editMessage = (text: string, messageId: string) => {
this.setQueryText(text);
this.setReplacing(true);
this.setReplacing(messageId);
};
replaceAtIndex = (message: Message, index: number) => {
@ -86,13 +136,21 @@ class KaiStore {
});
};
setTitle = (title: string | null) => {
this.chatTitle = title;
};
getChat = async (projectId: string, threadId: string) => {
this.setLoadingChat(true);
try {
const res = await aiService.getKaiChat(projectId, threadId);
if (res && res.length) {
const { messages, title } = await aiService.getKaiChat(
projectId,
threadId,
);
if (messages && messages.length) {
this.setTitle(title);
this.setMessages(
res.map((m) => {
messages.map((m) => {
const isUser = m.role === 'human';
return {
text: m.content,
@ -100,6 +158,9 @@ class KaiStore {
messageId: m.message_id,
duration: m.duration,
feedback: m.feedback,
chart: m.chart,
supports_visualization: m.supports_visualization,
chart_data: m.chart_data,
};
}),
);
@ -114,7 +175,6 @@ class KaiStore {
createChatManager = (
settings: { projectId: string; threadId: string },
setTitle: (title: string) => void,
initialMsg: string | null,
) => {
const token = kaiService.client.getJwt();
@ -122,21 +182,25 @@ class KaiStore {
console.error('No token found');
return;
}
this.checkUsage();
this.chatManager = new ChatManager({ ...settings, token });
this.chatManager.setOnMsgHook({
msgCallback: (msg) => {
if ('state' in msg) {
if (msg.type === 'state') {
if (msg.state === 'running') {
this.setProcessingStage({
content: 'Processing your request...',
stage: 'chart',
messageId: Date.now().toPrecision(),
duration: msg.start_time ? Date.now() - msg.start_time : 0,
type: 'chunk',
supports_visualization: false,
});
} else {
this.setProcessingStage(null);
}
} else {
}
if (msg.type === 'chunk') {
if (msg.stage === 'start') {
this.setProcessingStage({
...msg,
@ -153,13 +217,17 @@ class KaiStore {
messageId: msg.messageId,
duration: msg.duration,
feedback: null,
chart: '',
supports_visualization: msg.supports_visualization,
chart_data: '',
};
this.bumpUsage();
this.addMessage(msgObj);
this.setProcessingStage(null);
}
}
},
titleCallback: setTitle,
titleCallback: this.setTitle,
});
if (initialMsg) {
@ -167,13 +235,24 @@ class KaiStore {
}
};
setReplacing = (replacing: boolean) => {
setReplacing = (replacing: string | null) => {
this.replacing = replacing;
};
bumpUsage = () => {
this.usage.used += 1;
this.usage.percent = Math.min(
(this.usage.used / this.usage.total) * 100,
100,
);
if (this.usage.used >= this.usage.total) {
toast.error('You have reached the daily limit for queries.');
}
};
sendMessage = (message: string) => {
if (this.chatManager) {
this.chatManager.sendMessage(message, this.replacing);
this.chatManager.sendMessage(message, !!this.replacing);
}
if (this.replacing) {
console.log(
@ -197,6 +276,9 @@ class KaiStore {
messageId: Date.now().toString(),
feedback: null,
duration: 0,
supports_visualization: false,
chart: '',
chart_data: '',
});
};
@ -251,6 +333,66 @@ class KaiStore {
this.chatManager = null;
}
};
charts = new Map<string, Record<string, any>>();
getMessageChart = async (msgId: string, projectId: string) => {
this.setProcessingStage({
content: 'Generating visualization...',
stage: 'chart',
messageId: msgId,
duration: 0,
type: 'chunk',
supports_visualization: false,
});
try {
const filtersStr = await kaiService.getMsgChart(msgId, projectId);
if (!filtersStr.length) {
throw new Error('No filters found for the message');
}
const filters = JSON.parse(filtersStr);
const data = {
...filters,
};
const metric = new Widget().fromJson(data);
this.charts.set(msgId, data);
return metric;
} catch (e) {
console.error(e);
throw new Error('Failed to generate visualization');
} finally {
this.setProcessingStage(null);
}
};
saveLatestChart = async (msgId: string, projectId: string) => {
const data = this.charts.get(msgId);
if (data) {
try {
await kaiService.saveChartData(msgId, projectId, data);
this.charts.delete(msgId);
} catch (e) {
console.error(e);
}
}
};
getParsedChart = (data: string) => {
const parsedData = JSON.parse(data);
return new Widget().fromJson(parsedData);
};
setUsage = (usage: { total: number; used: number; percent: number }) => {
this.usage = usage;
};
checkUsage = async () => {
try {
const { total, used } = await kaiService.checkUsage();
this.setUsage({ total, used, percent: Math.round((used / total) * 100) });
} catch (e) {
console.error(e);
}
};
}
export const kaiStore = new KaiStore();

View file

@ -1,4 +1,5 @@
import io from 'socket.io-client';
import { toast } from 'react-toastify';
export class ChatManager {
socket: ReturnType<typeof io>;
@ -41,6 +42,9 @@ export class ChatManager {
console.log('Disconnected from server');
});
socket.on('error', (err) => {
if (err.message) {
toast.error(err.message);
}
console.error('Socket error:', err);
});
@ -74,12 +78,12 @@ export class ChatManager {
titleCallback,
}: {
msgCallback: (
msg: BotChunk | { state: string; type: 'state'; start_time?: number },
msg: StateEvent | BotChunk,
) => void;
titleCallback: (title: string) => void;
}) => {
this.socket.on('chunk', (msg: BotChunk) => {
msgCallback(msg);
msgCallback({ ...msg, type: 'chunk' });
});
this.socket.on('title', (msg: { content: string }) => {
titleCallback(msg.content);
@ -105,16 +109,13 @@ export interface BotChunk {
stage: 'start' | 'chart' | 'final' | 'title';
content: string;
messageId: string;
duration?: number;
}
export interface Message {
text: string;
isUser: boolean;
messageId: string;
duration?: number;
feedback: boolean | null;
duration: number;
supports_visualization: boolean;
type: 'chunk'
}
export interface SentMessage extends Message {
replace: boolean;
interface StateEvent {
state: string;
start_time?: number;
type: 'state';
}

View file

@ -1,6 +1,7 @@
import React from 'react';
import { Icon } from 'UI';
import { MessagesSquare, ArrowLeft } from 'lucide-react';
import { useTranslation } from 'react-i18next';
function ChatHeader({
openChats = () => {},
@ -11,6 +12,7 @@ function ChatHeader({
openChats?: () => void;
chatTitle: string | null;
}) {
const { t } = useTranslation();
return (
<div
className={
@ -20,17 +22,21 @@ function ChatHeader({
<div className={'flex-1'}>
{goBack ? (
<div
className={'flex items-center gap-2 font-semibold cursor-pointer'}
className={
'w-fit flex items-center gap-2 font-semibold cursor-pointer'
}
onClick={goBack}
>
<ArrowLeft size={14} />
<div>Back</div>
<div>{t('Back')}</div>
</div>
) : null}
</div>
<div className={'flex items-center gap-2 mx-auto max-w-[80%]'}>
{chatTitle ? (
<div className="font-semibold text-xl whitespace-nowrap truncate">{chatTitle}</div>
<div className="font-semibold text-xl whitespace-nowrap truncate">
{chatTitle}
</div>
) : (
<>
<Icon name={'kai_colored'} size={18} />
@ -38,14 +44,14 @@ function ChatHeader({
</>
)}
</div>
<div
className={
'font-semibold cursor-pointer flex items-center gap-2 flex-1 justify-end'
}
onClick={openChats}
>
<MessagesSquare size={14} />
<div>Chats</div>
<div className={'flex-1 justify-end flex items-center gap-2'}>
<div
className="font-semibold w-fit cursor-pointer flex items-center gap-2"
onClick={openChats}
>
<MessagesSquare size={14} />
<div>{t('Chats')}</div>
</div>
</div>
</div>
);

View file

@ -1,55 +1,114 @@
import React from 'react'
import { Button, Input } from "antd";
import { SendHorizonal, OctagonX } from "lucide-react";
import { kaiStore } from "../KaiStore";
import { observer } from "mobx-react-lite";
import React from 'react';
import { Button, Input, Tooltip } from 'antd';
import { SendHorizonal, OctagonX } from 'lucide-react';
import { kaiStore } from '../KaiStore';
import { observer } from 'mobx-react-lite';
import Usage from './Usage';
function ChatInput({ isLoading, onSubmit, threadId }: { isLoading?: boolean, onSubmit: (str: string) => void, threadId: string }) {
const inputRef = React.useRef<Input>(null);
function ChatInput({
isLoading,
onSubmit,
threadId,
}: {
isLoading?: boolean;
onSubmit: (str: string) => void;
threadId: string;
}) {
const inputRef = React.useRef<typeof Input>(null);
const usage = kaiStore.usage;
const limited = usage.percent >= 100;
const inputValue = kaiStore.queryText;
const isProcessing = kaiStore.processingStage !== null
const isProcessing = kaiStore.processingStage !== null;
const setInputValue = (text: string) => {
kaiStore.setQueryText(text)
}
kaiStore.setQueryText(text);
};
const submit = () => {
if (limited) {
return;
}
if (isProcessing) {
const settings = { projectId: '2325', userId: '0', threadId, };
void kaiStore.cancelGeneration(settings)
const settings = { projectId: '2325', userId: '0', threadId };
void kaiStore.cancelGeneration(settings);
} else {
if (inputValue.length > 0) {
onSubmit(inputValue)
setInputValue('')
onSubmit(inputValue);
setInputValue('');
}
}
}
};
const cancelReplace = () => {
setInputValue('');
kaiStore.setReplacing(null);
};
React.useEffect(() => {
if (inputRef.current) {
inputRef.current.focus()
inputRef.current.focus();
}
}, [inputValue])
}, [inputValue]);
const isReplacing = kaiStore.replacing !== null;
return (
<Input
onPressEnter={submit}
ref={inputRef}
placeholder={'Ask anything about your product and users...'}
size={'large'}
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
suffix={
<Button
loading={isLoading}
onClick={submit}
icon={isProcessing ? <OctagonX size={16} /> : <SendHorizonal size={16} />}
type={'text'}
size={'small'}
shape={'circle'}
/>
}
/>
)
<div className="relative">
<Input
onPressEnter={submit}
onKeyDown={(e) => {
if (e.key === 'Escape') {
cancelReplace();
}
}}
ref={inputRef}
placeholder={
limited
? `You've reached the daily limit for queries, come again tomorrow!`
: 'Ask anything about your product and users...'
}
size={'large'}
disabled={limited}
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
suffix={
<>
{isReplacing ? (
<Tooltip title={'Cancel Editing'}>
<Button
onClick={cancelReplace}
icon={<OctagonX size={16} />}
type={'text'}
size={'small'}
shape={'circle'}
disabled={limited}
/>
</Tooltip>
) : null}
<Tooltip title={'Send message'}>
<Button
loading={isLoading}
onClick={submit}
disabled={limited}
icon={
isProcessing ? (
<OctagonX size={16} />
) : (
<SendHorizonal size={16} />
)
}
type={'text'}
size={'small'}
shape={'circle'}
/>
</Tooltip>
</>
}
/>
<div className="absolute ml-1 top-2 -right-11">
<Usage />
</div>
</div>
);
}
export default observer(ChatInput)
export default observer(ChatInput);

View file

@ -1,6 +1,6 @@
import React from 'react';
import ChatInput from './ChatInput';
import { ChatMsg, ChatNotice } from './ChatMsg';
import ChatMsg, { ChatNotice } from './ChatMsg';
import { Loader } from 'UI';
import { kaiStore } from '../KaiStore';
import { observer } from 'mobx-react-lite';
@ -9,16 +9,16 @@ function ChatLog({
projectId,
threadId,
userLetter,
onTitleChange,
initialMsg,
chatTitle,
setInitialMsg,
}: {
projectId: string;
threadId: any;
userLetter: string;
onTitleChange: (title: string | null) => void;
initialMsg: string | null;
setInitialMsg: (msg: string | null) => void;
chatTitle: string | null;
}) {
const messages = kaiStore.messages;
const loading = kaiStore.loadingChat;
@ -31,7 +31,7 @@ function ChatLog({
void kaiStore.getChat(settings.projectId, threadId);
}
if (threadId) {
kaiStore.createChatManager(settings, onTitleChange, initialMsg);
kaiStore.createChatManager(settings, initialMsg);
}
return () => {
kaiStore.clearChat();
@ -61,17 +61,19 @@ function ChatLog({
>
<div className={'flex flex-col gap-4 w-2/3 min-h-max'}>
{messages.map((msg, index) => (
<ChatMsg
key={index}
text={msg.text}
isUser={msg.isUser}
userName={userLetter}
messageId={msg.messageId}
isLast={index === lastHumanMsgInd}
duration={msg.duration}
feedback={msg.feedback}
siteId={projectId}
/>
<React.Fragment key={msg.messageId ?? index}>
<ChatMsg
userName={userLetter}
siteId={projectId}
message={msg}
chatTitle={chatTitle}
canEdit={
processingStage === null &&
msg.isUser &&
index === lastHumanMsgInd
}
/>
</React.Fragment>
))}
{processingStage ? (
<ChatNotice

View file

@ -1,5 +1,6 @@
import React from 'react';
import { Icon, CopyButton } from 'UI';
import { observer } from 'mobx-react-lite';
import cn from 'classnames';
import Markdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
@ -10,35 +11,51 @@ import {
ListRestart,
FileDown,
Clock,
ChartLine,
} from 'lucide-react';
import { Button, Tooltip } from 'antd';
import { kaiStore } from '../KaiStore';
import { kaiStore, Message } from '../KaiStore';
import { toast } from 'react-toastify';
import { durationFormatted } from 'App/date';
import WidgetChart from '@/components/Dashboard/components/WidgetChart';
import Widget from 'App/mstore/types/widget';
import { useTranslation } from 'react-i18next';
export function ChatMsg({
text,
isUser,
function ChatMsg({
userName,
messageId,
isLast,
duration,
feedback,
siteId,
canEdit,
message,
chatTitle,
}: {
text: string;
isUser: boolean;
messageId: string;
message: Message;
userName?: string;
isLast?: boolean;
duration?: number;
feedback: boolean | null;
canEdit?: boolean;
siteId: string;
chatTitle: string | null;
}) {
const { t } = useTranslation();
const [metric, setMetric] = React.useState<Widget | null>(null);
const [loadingChart, setLoadingChart] = React.useState(false);
const {
text,
isUser,
messageId,
duration,
feedback,
supports_visualization,
chart_data,
} = message;
const isEditing = kaiStore.replacing && messageId === kaiStore.replacing;
const [isProcessing, setIsProcessing] = React.useState(false);
const bodyRef = React.useRef<HTMLDivElement>(null);
const onRetry = () => {
kaiStore.editMessage(text);
const chartRef = React.useRef<HTMLDivElement>(null);
const onEdit = () => {
kaiStore.editMessage(text, messageId);
};
const onCancelEdit = () => {
kaiStore.setQueryText('');
kaiStore.setReplacing(null);
};
const onFeedback = (feedback: 'like' | 'dislike', messageId: string) => {
kaiStore.sendMsgFeedback(feedback, messageId, siteId);
@ -51,19 +68,68 @@ export function ChatMsg({
setIsProcessing(false);
return;
}
const userPrompt = kaiStore.getPreviousMessage(message.messageId);
import('jspdf')
.then(({ jsPDF }) => {
.then(async ({ jsPDF }) => {
const doc = new jsPDF();
doc.addImage('/assets/img/logo-img.png', 80, 3, 30, 5);
doc.html(bodyRef.current!, {
const blockWidth = 170; // mm
doc.addImage('/assets/img/logo-img.png', 20, 15, 30, 5);
const content = bodyRef.current!.cloneNode(true) as HTMLElement;
if (userPrompt) {
const titleHeader = document.createElement('h2');
titleHeader.textContent = userPrompt.text;
titleHeader.style.marginBottom = '10px';
content.prepend(titleHeader);
}
content.querySelectorAll('ul').forEach((ul) => {
const frag = document.createDocumentFragment();
ul.querySelectorAll('li').forEach((li) => {
const div = document.createElement('div');
div.textContent = '• ' + li.textContent;
frag.appendChild(div);
});
ul.replaceWith(frag);
});
content.querySelectorAll('h1,h2,h3,h4,h5,h6').forEach((el) => {
(el as HTMLElement).style.letterSpacing = '0.5px';
});
content.querySelectorAll('*').forEach((node) => {
node.childNodes.forEach((child) => {
if (child.nodeType === Node.TEXT_NODE) {
const txt = child.textContent || '';
const replaced = txt.replace(/-/g, '');
if (replaced !== txt) child.textContent = replaced;
}
});
});
if (metric && chartRef.current) {
const { default: html2canvas } = await import('html2canvas');
const metricContainer = chartRef.current;
const image = await html2canvas(metricContainer, {
backgroundColor: null,
scale: 2,
});
const imgData = image.toDataURL('image/png');
const imgHeight = (image.height * blockWidth) / image.width;
content.appendChild(
Object.assign(document.createElement('img'), {
src: imgData,
style: `width: ${blockWidth}mm; height: ${imgHeight}mm; margin-top: 10px;`,
}),
);
}
doc.html(content, {
callback: function (doc) {
doc.save('document.pdf');
doc.save((chatTitle ?? 'document') + '.pdf');
},
margin: [10, 10, 10, 10],
// top, bottom, ?, left
margin: [5, 10, 20, 20],
x: 0,
y: 0,
width: 190, // Target width
windowWidth: 675, // Window width for rendering
y: 15,
// Target width
width: blockWidth,
// Window width for rendering
windowWidth: 675,
});
})
.catch((e) => {
@ -74,17 +140,38 @@ export function ChatMsg({
setIsProcessing(false);
});
};
React.useEffect(() => {
if (chart_data) {
const metric = kaiStore.getParsedChart(chart_data);
setMetric(metric);
}
}, [chart_data]);
const getChart = async () => {
try {
setLoadingChart(true);
const metric = await kaiStore.getMessageChart(messageId, siteId);
setMetric(metric);
} catch (e) {
toast.error(e.message);
} finally {
setLoadingChart(false);
}
};
const metricData = metric?.data;
React.useEffect(() => {
if (!chart_data && metricData && metricData.values.length > 0) {
kaiStore.saveLatestChart(messageId, siteId);
}
}, [metricData, chart_data]);
return (
<div
className={cn(
'flex items-start gap-2',
isUser ? 'flex-row-reverse' : 'flex-row',
)}
>
<div className={cn('flex gap-2', isUser ? 'flex-row-reverse' : 'flex-row')}>
{isUser ? (
<div
className={
'rounded-full bg-main text-white min-w-8 min-h-8 flex items-center justify-center sticky top-0'
'rounded-full bg-main text-white min-w-8 min-h-8 max-h-8 max-w-8 flex items-center justify-center sticky top-0 mt-2 shadow'
}
>
<span className={'font-semibold'}>{userName}</span>
@ -92,28 +179,57 @@ export function ChatMsg({
) : (
<div
className={
'rounded-full bg-white shadow min-w-8 min-h-8 flex items-center justify-center sticky top-0'
'rounded-full bg-gray-lightest shadow min-w-8 min-h-8 max-h-8 max-w-8 flex items-center justify-center sticky top-0 mt-2'
}
>
<Icon name={'kai_colored'} size={18} />
</div>
)}
<div className={'mt-1 flex flex-col'}>
<div className="markdown-body" ref={bodyRef}>
<div
className={cn(
'markdown-body',
isEditing ? 'border-l border-l-main pl-2' : '',
)}
data-openreplay-obscured
ref={bodyRef}
>
<Markdown remarkPlugins={[remarkGfm]}>{text}</Markdown>
</div>
{metric ? (
<div
ref={chartRef}
className="p-2 border-gray-light rounded-lg shadow bg-glassWhite mb-2"
>
<WidgetChart metric={metric} isPreview height={360} />
</div>
) : null}
{isUser ? (
isLast ? (
<>
<div
onClick={onRetry}
className={
'ml-auto flex items-center gap-2 px-2 rounded-lg border border-gray-medium text-sm cursor-pointer hover:border-main hover:text-main w-fit'
}
onClick={onEdit}
className={cn(
'ml-auto flex items-center gap-2 px-2',
'rounded-lg border border-gray-medium text-sm cursor-pointer',
'hover:border-main hover:text-main w-fit',
canEdit && !isEditing ? '' : 'hidden',
)}
>
<ListRestart size={16} />
<div>Edit</div>
<div>{t('Edit')}</div>
</div>
) : null
<div
onClick={onCancelEdit}
className={cn(
'ml-auto flex items-center gap-2 px-2',
'rounded-lg border border-gray-medium text-sm cursor-pointer',
'hover:border-main hover:text-main w-fit',
isEditing ? '' : 'hidden',
)}
>
<div>{t('Cancel')}</div>
</div>
</>
) : (
<div className={'flex items-center gap-2'}>
{duration ? <MsgDuration duration={duration} /> : null}
@ -132,6 +248,15 @@ export function ChatMsg({
>
<ThumbsDown size={16} />
</IconButton>
{supports_visualization ? (
<IconButton
tooltip="Visualize this answer"
onClick={getChart}
processing={loadingChart}
>
<ChartLine size={16} />
</IconButton>
) : null}
<CopyButton
getHtml={() => bodyRef.current?.innerHTML}
content={text}
@ -215,3 +340,5 @@ function MsgDuration({ duration }: { duration: number }) {
</div>
);
}
export default observer(ChatMsg);

View file

@ -0,0 +1,140 @@
import React from 'react';
import { splitByDate } from '../utils';
import { useQuery } from '@tanstack/react-query';
import { MessagesSquare, Trash } from 'lucide-react';
import { kaiService } from 'App/services';
import { toast } from 'react-toastify';
import { useTranslation } from 'react-i18next';
import { kaiStore } from '../KaiStore';
import { observer } from 'mobx-react-lite';
/**
 * Modal listing the user's Kai chat threads for a project, grouped by day
 * ("Today" / "Yesterday" / explicit dates), with per-thread delete.
 *
 * @param onSelect  invoked with (threadId, title) when a chat row is clicked
 * @param projectId project whose chats are fetched and mutated
 */
function ChatsModal({
  onSelect,
  projectId,
}: {
  onSelect: (threadId: string, title: string) => void;
  projectId: string;
}) {
  const { t } = useTranslation();
  const { usage } = kaiStore;
  const {
    data = [],
    isPending,
    refetch,
  } = useQuery({
    queryKey: ['kai', 'chats', projectId],
    queryFn: () => kaiService.getKaiChats(projectId),
    staleTime: 1000 * 60, // keep the chat list fresh for one minute
  });

  // Refresh the daily-usage counters each time the modal is mounted.
  React.useEffect(() => {
    kaiStore.checkUsage();
  }, []);

  // FIX: depend on `data` itself rather than `data.length` — a rename, or a
  // delete+create that leaves the count unchanged, would otherwise render
  // stale groups because the memo never recomputed.
  const datedCollections = React.useMemo(
    () => (data.length ? splitByDate(data) : []),
    [data],
  );

  const onDelete = async (id: string) => {
    try {
      await kaiService.deleteKaiChat(projectId, id);
    } catch (e) {
      // FIX: typo in user-facing message ("wen't" -> "went").
      toast.error("Something went wrong. Please try again later.");
    }
    // Refetch unconditionally so the list reflects actual server state.
    refetch();
  };

  return (
    <div className={'h-screen w-full flex flex-col gap-2 p-4'}>
      <div className={'flex items-center font-semibold text-lg gap-2'}>
        <MessagesSquare size={16} />
        <span>{t('Chats')}</span>
      </div>
      {usage.percent > 80 ? (
        <div className="text-red text-sm">
          {t('You have used {{used}} out of {{total}} daily requests', {
            used: usage.used,
            total: usage.total,
          })}
        </div>
      ) : null}
      {isPending ? (
        <div className="animate-pulse text-disabled-text">
          {t('Loading chats')}...
        </div>
      ) : (
        <div className="overflow-y-auto flex flex-col gap-2">
          {datedCollections.map((col, i) => (
            <React.Fragment key={`${i}_${col.date}`}>
              <ChatCollection
                data={col.entries}
                date={col.date}
                onSelect={onSelect}
                onDelete={onDelete}
              />
            </React.Fragment>
          ))}
        </div>
      )}
    </div>
  );
}
/** One date group: a muted date heading followed by its list of chat rows. */
function ChatCollection(props: {
  data: { title: string; thread_id: string }[];
  onSelect: (threadId: string, title: string) => void;
  onDelete: (threadId: string) => void;
  date: string;
}) {
  const { date, data, onSelect, onDelete } = props;
  return (
    <div>
      <div className="text-disabled-text">{date}</div>
      <ChatsList data={data} onSelect={onSelect} onDelete={onDelete} />
    </div>
  );
}
function ChatsList({
data,
onSelect,
onDelete,
}: {
data: { title: string; thread_id: string }[];
onSelect: (threadId: string, title: string) => void;
onDelete: (threadId: string) => void;
}) {
return (
<div className="flex flex-col gap-1 -mx-4 px-4">
{data.map((chat) => (
<div
key={chat.thread_id}
className="flex items-center relative group min-h-7"
>
<div
style={{ width: 270 - 28 - 4 }}
className="rounded-l pl-2 min-h-7 h-full w-full hover:bg-active-blue flex items-center"
>
<div
onClick={() => onSelect(chat.thread_id, chat.title)}
className="cursor-pointer hover:underline truncate"
>
{chat.title}
</div>
</div>
<div
onClick={() => onDelete(chat.thread_id)}
className="cursor-pointer opacity-0 group-hover:opacity-100 rounded-r min-h-7 h-full px-2 flex items-center group-hover:bg-active-blue"
>
<Trash size={14} className="text-disabled-text" />
</div>
</div>
))}
</div>
);
}
export default observer(ChatsModal);

View file

@ -0,0 +1,31 @@
import React from 'react';
import { kaiStore } from '../KaiStore';
import { observer } from 'mobx-react-lite';
import { Progress, Tooltip } from 'antd';
/**
 * Compact circular gauge of the user's daily Kai request quota.
 * Renders nothing until the quota total is known (total === 0).
 */
function Usage() {
  const { usage } = kaiStore;
  if (usage.total === 0) {
    return null;
  }

  const percentUsed = Math.round((usage.used / usage.total) * 100);
  // Switch to the warning color once the quota is effectively exhausted.
  const gaugeColor =
    percentUsed < 99 ? 'var(--color-main)' : 'var(--color-red)';

  return (
    <div>
      <Tooltip title={`Daily response limit (${usage.used}/${usage.total})`}>
        <Progress
          percent={percentUsed}
          strokeColor={gaugeColor}
          showInfo={false}
          type="circle"
          size={24}
        />
      </Tooltip>
    </div>
  );
}
export default observer(Usage);

View file

@ -0,0 +1,36 @@
import { DateTime } from 'luxon';
/** One date bucket produced by {@link splitByDate}. */
type DatedEntry = {
  date: string;
  entries: { datetime: string }[];
};

/**
 * Groups entries by calendar day. The first two buckets are labelled
 * 'Today' and 'Yesterday'; older days get a 'dd LLL, yyyy' label in the
 * order they are first encountered. Entry order within a bucket follows the
 * input order, and empty buckets are dropped from the result.
 */
export function splitByDate(entries: { datetime: string }[]) {
  const startOfToday = DateTime.now().startOf('day');
  const startOfYesterday = startOfToday.minus({ days: 1 });

  const todayBucket: DatedEntry = { date: 'Today', entries: [] };
  const yesterdayBucket: DatedEntry = { date: 'Yesterday', entries: [] };
  const buckets: DatedEntry[] = [todayBucket, yesterdayBucket];

  for (const entry of entries) {
    const day = DateTime.fromISO(entry.datetime).startOf('day');
    if (day.toMillis() === startOfToday.toMillis()) {
      todayBucket.entries.push(entry);
      continue;
    }
    if (day.toMillis() === startOfYesterday.toMillis()) {
      yesterdayBucket.entries.push(entry);
      continue;
    }
    const label = day.toFormat('dd LLL, yyyy');
    const existing = buckets.find((bucket) => bucket.date === label);
    if (existing) {
      existing.entries.push(entry);
    } else {
      buckets.push({ date: label, entries: [entry] });
    }
  }

  return buckets.filter((bucket) => bucket.entries.length > 0);
}

View file

@ -54,11 +54,22 @@ function ClipPlayerContent(props: Props) {
if (!playerContext.player) return null;
const outerHeight = props.isHighlight ? 556 + 39 : 556;
const innerHeight = props.isHighlight ? 504 + 39 : 504;
return (
<div
className={cn(styles.playerBlock, 'flex flex-col', 'overflow-x-hidden max-h-[556px] h-[556px]')}
className={cn(
styles.playerBlock,
'flex flex-col',
`overflow-x-hidden max-h-[${outerHeight}px] h-[${outerHeight}px]`,
)}
>
<div className={cn(stl.playerBody, 'flex-1 flex flex-col relative max-h-[504px] h-[504px]')}>
<div
className={cn(
stl.playerBody,
`flex-1 flex flex-col relative max-h-[${innerHeight}px] h-[${innerHeight}px]`,
)}
>
<div className={cn(stl.playerBody, 'flex flex-1 flex-col relative')}>
<div className="relative flex-1 overflow-hidden group">
<ClipPlayerOverlay autoplay={props.autoplay} />

View file

@ -199,6 +199,8 @@ function BottomBlock({ panelHeight, block }: { panelHeight: number; block: numbe
return <BackendLogsPanel />;
case LONG_TASK:
return <LongTaskPanel />;
case OVERVIEW:
return <OverviewPanel />;
default:
return null;
}

View file

@ -13,6 +13,7 @@ export function LoadingFetch({ provider }: { provider: string }) {
<LoadingOutlined size={32} />
<div>
{t('Fetching logs from')}
&nbsp;
{provider}
...
</div>

View file

@ -206,7 +206,7 @@ function HighlightPanel({ onClose }: { onClose: () => void }) {
<Tag
onClick={() => addTag(tag)}
key={tag}
className="cursor-pointer rounded-lg hover:bg-indigo-50 mr-0"
className="cursor-pointer rounded-lg hover:bg-indigo-lightest mr-0"
color={tagProps[tag]}
bordered={false}
>

View file

@ -137,13 +137,15 @@ function SubHeader(props) {
return (
<>
<WarnBadge
siteId={projectId!}
currentLocation={currentLocation}
version={currentSession?.trackerVersion ?? ''}
containerStyle={{ position: 'relative', left: 0, top: 0, transform: 'none', zIndex: 10 }}
trackerWarnStyle={{ backgroundColor: '#fffbeb' }}
/>
<WarnBadge
siteId={projectId!}
currentLocation={currentLocation}
version={currentSession?.trackerVersion ?? ''}
containerStyle={{ position: 'relative', left: 0, top: 0, transform: 'none', zIndex: 10 }}
trackerWarnStyle={{ backgroundColor: 'var(--color-yellow)', color: 'black' }}
virtualElsFailed={showVModeBadge}
onVMode={onVMode}
/>
<div
className="w-full px-4 flex items-center border-b relative"
style={{
@ -154,13 +156,6 @@ function SubHeader(props) {
: undefined,
}}
>
<WarnBadge
siteId={projectId!}
currentLocation={currentLocation}
version={currentSession?.trackerVersion ?? ''}
virtualElsFailed={showVModeBadge}
onVMode={onVMode}
/>
<SessionTabs />

View file

@ -111,7 +111,7 @@ const WarnBadge = React.memo(
transform: 'translate(-50%, 80%)',
fontWeight: 500,
};
const defaultContainerClass = 'flex flex-col gap-2';
const defaultContainerClass = 'flex flex-col';
const defaultWarnClass =
'px-3 py-.5 border border-gray-lighter shadow-sm rounded bg-active-blue flex items-center justify-between';
@ -188,7 +188,7 @@ const WarnBadge = React.memo(
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(1)}
>
<Icon name="close" size={16} color="black" />
<Icon name="close" size={16} color="#000000" />
</div>
</div>
) : null}
@ -213,27 +213,6 @@ const WarnBadge = React.memo(
</div>
</div>
) : null}
{warnings[2] ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div className="flex flex-col">
<div>
{t(
'If you have issues displaying custom HTML elements (i.e when using LWC), consider turning on Virtual Mode.',
)}
</div>
<div className="link" onClick={onVMode}>
{t('Enable')}
</div>
</div>
<div
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(2)}
>
<X size={18} strokeWidth={1.5} />
</div>
</div>
) : null}
</div>
);
},

Some files were not shown because too many files have changed in this diff Show more