resolved conflicts

Андрей Бабушкин 2025-04-11 15:32:57 +02:00
commit 6ddee47a22
230 changed files with 11227 additions and 3506 deletions

View file

@ -47,6 +47,7 @@ runs:
"JWT_SECRET:.global.jwtSecret"
"JWT_SPOT_REFRESH_SECRET:.chalice.env.JWT_SPOT_REFRESH_SECRET"
"JWT_SPOT_SECRET:.global.jwtSpotSecret"
"JWT_SECRET:.global.tokenSecret"
"LICENSE_KEY:.global.enterpriseEditionLicense"
"MINIO_ACCESS_KEY:.global.s3.accessKey"
"MINIO_SECRET_KEY:.global.s3.secretKey"

122 .github/workflows/assist-server-ee.yaml vendored Normal file
View file

@ -0,0 +1,122 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
required: false
default: "false"
push:
branches:
- dev
paths:
- "ee/assist-server/**"
name: Build and Deploy Assist-Server EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- uses: ./.github/composite-actions/update-keys
with:
assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
assist_key: ${{ secrets.ASSIST_KEY }}
domain_name: ${{ secrets.EE_DOMAIN_NAME }}
jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
jwt_secret: ${{ secrets.EE_JWT_SECRET }}
jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
license_key: ${{ secrets.EE_LICENSE_KEY }}
minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
pg_password: ${{ secrets.EE_PG_PASSWORD }}
registry_url: ${{ secrets.OSS_REGISTRY_URL }}
name: Update Keys
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing Assist-Server image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd assist-server
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("assist-server")
for image in ${images[*]};do
./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("assist-server")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
pwd
cd scripts/helmcharts/
# Update changed image tag
sed -i "/assist-server/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mkdir -p /tmp/charts
mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
rm -rf openreplay/charts/*
mv /tmp/charts/* openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
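For readability, here is a minimal Python sketch of what the "Creating old image input" step above builds from the kubectl | tr | uniq | cut pipeline; the registry host and image tags are hypothetical placeholders, not values from this commit:

# Hypothetical pod images as returned by `kubectl get pods ... -o jsonpath`.
pod_images = [
    "registry.example.com/foss/chalice:v1.22.0-ee",
    "registry.example.com/foss/frontend:v1.22.0-ee",
]

override_lines = []
for image in sorted(set(pod_images)):
    name_and_tag = image.split("/")[2]      # cut -d '/' -f3  -> "chalice:v1.22.0-ee"
    name, tag = name_and_tag.split(":", 1)  # split image name from tag
    base_tag = tag.split("-", 1)[0]         # cut -d '-' -f1  -> strip "-ee"; helm re-appends it
    override_lines += [f"{name}:", "  image:", f"    tag: {base_tag}"]
print("\n".join(override_lines))
# chalice:
#   image:
#     tag: v1.22.0
# frontend:
#   image:
#     tag: v1.22.0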

185 .github/workflows/patch-build-old.yaml vendored Normal file
View file

@ -0,0 +1,185 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on:
workflow_dispatch:
inputs:
services:
description: 'Comma-separated names of services to build (in lowercase).'
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to build patches from.'
required: true
type: string
name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
jobs:
deploy:
name: Build Patch from old tag
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 4
ref: ${{ github.event.inputs.tag }}
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Create backup tag with timestamp
run: |
set -e # Exit immediately if a command exits with a non-zero status
TIMESTAMP=$(date +%Y%m%d%H%M%S)
BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
echo "Created backup tag: $BACKUP_TAG"
# Get the oldest commit date from the last 3 commits in raw format
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
# Add 1 second to the timestamp
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
- name: Setup yq
uses: mikefarah/yq@master
# Configure AWS credentials for the first registry
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
id: login-ecr-arm
run: |
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=patch/main/${HEAD_COMMIT_ID}" >> $GITHUB_ENV
- name: Build
id: build-image
env:
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout $INPUT_TAG
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
done
- name: Change commit timestamp
run: |
# Convert the timestamp to a date format git can understand
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
echo "Setting commit date to: $NEW_DATE"
# Amend the commit with the new date
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
# Verify the change
git log -1 --pretty=format:"Commit now dated: %cD"
# git tag and push
git tag $INPUT_TAG -f
git push origin $INPUT_TAG -f
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
# MSAAS_REPO_FOLDER: /tmp/msaas
# with:
# limit-access-to-actor: true
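The image_version helper in the Build step above bumps the chart's AppVersion with awk by incrementing the last dot-separated field. A minimal Python sketch of the same computation, assuming a three-part version string:

# Equivalent of: awk -F. '{$NF += 1 ; print $1"."$2"."$3}'
def bump_patch(version: str) -> str:
    major, minor, patch = version.split(".")
    return f"{major}.{minor}.{int(patch) + 1}"

print(bump_patch("1.22.3"))  # -> "1.22.4"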

View file

@ -22,22 +22,14 @@ jobs:
- name: Cache tracker modules
uses: actions/cache@v3
with:
path: tracker/tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
restore-keys: |
test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}-
- name: Cache tracker-assist modules
uses: actions/cache@v3
with:
path: tracker/tracker-assist/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lockb') }}
path: tracker/node_modules
key: ${{ runner.OS }}-test_tracker_build-${{ hashFiles('**/bun.lock') }}
restore-keys: |
test_tracker_build{{ runner.OS }}-build-
test_tracker_build{{ runner.OS }}-
- name: Setup Testing packages
run: |
cd tracker/tracker
cd tracker
bun install
- name: Jest tests
run: |
@ -47,10 +39,6 @@ jobs:
run: |
cd tracker/tracker
bun run build
- name: (TA) Setup Testing packages
run: |
cd tracker/tracker-assist
bun install
- name: (TA) Jest tests
run: |
cd tracker/tracker-assist

1 .gitignore vendored
View file

@ -7,3 +7,4 @@ node_modules
**/*.envrc
.idea
*.mob*
install-state.gz

View file

@ -6,16 +6,15 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.21"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}

View file

@ -16,7 +16,7 @@ from chalicelib.utils import helper
from chalicelib.utils import pg_client, ch_client
from crons import core_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")
@ -129,6 +129,6 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)
app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/pa")
app.include_router(product_analytics.app, prefix="/pa")
app.include_router(product_analytics.app_apikey, prefix="/pa")

View file

@ -241,3 +241,25 @@ def get_colname_by_key(project_id, key):
return None
return index_to_colname(meta_keys[key])
def get_for_filters(project_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT {",".join(column_names())}
FROM public.projects
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
LIMIT 1;""", {"project_id": project_id})
cur.execute(query=query)
metas = cur.fetchone()
results = []
if metas is not None:
for i, k in enumerate(metas.keys()):
if metas[k] is not None:
results.append({"id": f"meta_{i}",
"name": k,
"displayName": metas[k],
"possibleTypes": ["String"],
"autoCaptured": False,
"icon": None})
return {"total": len(results), "list": results}

View file

@ -6,7 +6,7 @@ from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
def filter_stages(stages: List[schemas.SessionSearchEventSchema]):
ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
def __parse_events(f_events: List[dict]):
return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]
return [schemas.SessionSearchEventSchema.parse_obj(e) for e in f_events]
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
def __fix_stages(f_events: List[schemas.SessionSearchEventSchema]):
if f_events is None:
return
events = []

View file

@ -160,7 +160,7 @@ s.start_ts,
s.duration"""
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
start_time: int,
end_time: int) -> str | None:
full_args = {
@ -240,13 +240,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS))
if not location_condition:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
if no_click:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0],

View file

@ -24,8 +24,9 @@ def get_by_url(project_id, data: schemas.GetHeatMapPayloadSchema):
"main_events.`$event_name` = 'CLICK'",
"isNotNull(JSON_VALUE(CAST(main_events.`$properties` AS String), '$.normalized_x'))"
]
if data.operator == schemas.SearchEventOperator.IS:
if data.operator == schemas.SearchEventOperator.PATTERN:
constraints.append("match(main_events.`$properties`.url_path'.:String,%(url)s)")
elif data.operator == schemas.SearchEventOperator.IS:
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') = %(url)s")
else:
constraints.append("JSON_VALUE(CAST(main_events.`$properties` AS String), '$.url_path') ILIKE %(url)s")
@ -179,7 +180,7 @@ toUnixTimestamp(s.datetime)*1000 AS start_ts,
s.duration AS duration"""
def __get_1_url(location_condition: schemas.SessionSearchEventSchema2 | None, session_id: str, project_id: int,
def __get_1_url(location_condition: schemas.SessionSearchEventSchema | None, session_id: str, project_id: int,
start_time: int,
end_time: int) -> str | None:
full_args = {
@ -262,13 +263,13 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS))
if not location_condition:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
if no_click:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.CLICK,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.events.append(schemas.SessionSearchEventSchema(type=schemas.EventType.CLICK,
value=[],
operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0],

View file

@ -241,7 +241,7 @@ def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas
:return:
"""
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]

View file

@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
def get_simple_funnel(filter_d: schemas.CardSeriesFilterSchema, project: schemas.ProjectContext,
metric_format: schemas.MetricExtendedFormatType) -> List[RealDictRow]:
stages: List[schemas.SessionSearchEventSchema2] = filter_d.events
stages: List[schemas.SessionSearchEventSchema] = filter_d.events
filters: List[schemas.SessionSearchFilterSchema] = filter_d.filters
platform = project.platform
constraints = ["e.project_id = %(project_id)s",

View file

@ -1,14 +0,0 @@
from chalicelib.utils.ch_client import ClickHouseClient
def search_events(project_id: int, data: dict):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT *
FROM taha.events
WHERE project_id=%(project_id)s
ORDER BY created_at;""",
params={"project_id": project_id})
x = ch_client.execute(r)
return x

View file

@ -0,0 +1,139 @@
import logging
import schemas
from chalicelib.utils import helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.ch_client import ClickHouseClient
from chalicelib.utils.exp_ch_helper import get_sub_condition
logger = logging.getLogger(__name__)
def get_events(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT DISTINCT ON(event_name,auto_captured)
COUNT(1) OVER () AS total,
event_name AS name, display_name, description,
auto_captured
FROM product_analytics.all_events
WHERE project_id=%(project_id)s
ORDER BY auto_captured,display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
rows = ch_client.execute(r)
if len(rows) == 0:
return {"total": 0, "list": []}
total = rows[0]["total"]
for i, row in enumerate(rows):
row["id"] = f"event_{i}"
row["icon"] = None
row["possibleTypes"] = ["string"]
row.pop("total")
return {"total": total, "list": helper.list_to_camel_case(rows)}
def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
with ClickHouseClient() as ch_client:
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
"projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
constraints = ["project_id = %(projectId)s",
"created_at >= toDateTime(%(startDate)s/1000)",
"created_at <= toDateTime(%(endDate)s/1000)"]
ev_constraints = []
for i, f in enumerate(data.filters):
if not f.is_event:
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
if f.is_predefined:
column = f.name
else:
column = f"properties.{f.name}"
if is_any:
condition = f"notEmpty{column})"
elif is_undefined:
condition = f"empty({column})"
else:
condition = sh.multi_conditions(
get_sub_condition(col_name=column, val_name=f_k, operator=f.operator),
values=f.value, value_key=f_k)
constraints.append(condition)
else:
e_k = f"e_value{i}"
full_args = {**full_args, e_k: f.name}
condition = f"`$event_name` = %({e_k})s"
sub_conditions = []
for j, ef in enumerate(f.properties.filters):
p_k = f"e_{i}_p_{j}"
full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k)}
if ef.is_predefined:
sub_condition = get_sub_condition(col_name=ef.name, val_name=p_k, operator=ef.operator)
else:
sub_condition = get_sub_condition(col_name=f"properties.{ef.name}",
val_name=p_k, operator=ef.operator)
sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k))
if len(sub_conditions) > 0:
condition += " AND (" + (" " + f.properties.operator + " ").join(sub_conditions) + ")"
ev_constraints.append(condition)
constraints.append("(" + " OR ".join(ev_constraints) + ")")
query = ch_client.format(
f"""SELECT COUNT(1) OVER () AS total,
event_id,
`$event_name`,
created_at,
`distinct_id`,
`$browser`,
`$import`,
`$os`,
`$country`,
`$state`,
`$city`,
`$screen_height`,
`$screen_width`,
`$source`,
`$user_id`,
`$device`
FROM product_analytics.events
WHERE {" AND ".join(constraints)}
ORDER BY created_at
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters=full_args)
rows = ch_client.execute(query)
if len(rows) == 0:
return {"total": 0, "rows": [], "src": 2}
total = rows[0]["total"]
for r in rows:
r.pop("total")
return {"total": total, "rows": rows, "src": 2}
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
all_events.event_name AS name,
*
FROM product_analytics.all_events
WHERE project_id=%(project_id)s
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id, "limit": page.limit, "offset": (page.page - 1) * page.limit})
rows = ch_client.execute(r)
if len(rows) == 0:
return {"total": 0, "list": []}
total = rows[0]["total"]
for i, row in enumerate(rows):
row["id"] = f"event_{i}"
row["icon"] = None
row["possibleTypes"] = ["string"]
row.pop("total")
return {"total": total, "list": helper.list_to_camel_case(rows)}

View file

@ -0,0 +1,83 @@
from chalicelib.utils import helper, exp_ch_helper
from chalicelib.utils.ch_client import ClickHouseClient
import schemas
def get_all_properties(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
property_name AS name, display_name,
array_agg(DISTINCT event_properties.value_type) AS possible_types
FROM product_analytics.all_properties
LEFT JOIN product_analytics.event_properties USING (project_id, property_name)
WHERE all_properties.project_id=%(project_id)s
GROUP BY property_name,display_name
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id,
"limit": page.limit,
"offset": (page.page - 1) * page.limit})
properties = ch_client.execute(r)
if len(properties) == 0:
return {"total": 0, "list": []}
total = properties[0]["total"]
properties = helper.list_to_camel_case(properties)
for i, p in enumerate(properties):
p["id"] = f"prop_{i}"
p["icon"] = None
p["possibleTypes"] = exp_ch_helper.simplify_clickhouse_types(p["possibleTypes"])
p.pop("total")
return {"total": total, "list": properties}
def get_event_properties(project_id: int, event_name):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT all_properties.property_name,
all_properties.display_name
FROM product_analytics.event_properties
INNER JOIN product_analytics.all_properties USING (property_name)
WHERE event_properties.project_id=%(project_id)s
AND all_properties.project_id=%(project_id)s
AND event_properties.event_name=%(event_name)s
ORDER BY created_at;""",
parameters={"project_id": project_id, "event_name": event_name})
properties = ch_client.execute(r)
return helper.list_to_camel_case(properties)
def get_lexicon(project_id: int, page: schemas.PaginatedSchema):
with ClickHouseClient() as ch_client:
r = ch_client.format(
"""SELECT COUNT(1) OVER () AS total,
all_properties.property_name AS name,
all_properties.*,
possible_types.values AS possible_types,
possible_values.values AS sample_values
FROM product_analytics.all_properties
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value_type) AS values
FROM product_analytics.event_properties
WHERE project_id=%(project_id)s
GROUP BY 1, 2) AS possible_types
USING (project_id, property_name)
LEFT JOIN (SELECT project_id, property_name, array_agg(DISTINCT value) AS values
FROM product_analytics.property_values_samples
WHERE project_id=%(project_id)s
GROUP BY 1, 2) AS possible_values USING (project_id, property_name)
WHERE project_id=%(project_id)s
ORDER BY display_name
LIMIT %(limit)s OFFSET %(offset)s;""",
parameters={"project_id": project_id,
"limit": page.limit,
"offset": (page.page - 1) * page.limit})
properties = ch_client.execute(r)
if len(properties) == 0:
return {"total": 0, "list": []}
total = properties[0]["total"]
for i, p in enumerate(properties):
p["id"] = f"prop_{i}"
p["icon"] = None
p.pop("total")
return {"total": total, "list": helper.list_to_camel_case(properties)}

View file

@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search_pg
from . import sessions_search_pg as sessions_search_legacy
if config("EXP_METRICS", cast=bool, default=False):
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental sessions search")
from . import sessions_ch as sessions
from . import sessions_search_ch as sessions_search
else:
from . import sessions_pg as sessions
from . import sessions_search_pg as sessions_search
# if config("EXP_METRICS", cast=bool, default=False):
# from . import sessions_ch as sessions
# else:
# from . import sessions_pg as sessions

View file

@ -6,6 +6,7 @@ from chalicelib.core import events, metadata
from . import performance_event, sessions_legacy
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.exp_ch_helper import get_sub_condition
logger = logging.getLogger(__name__)
@ -48,8 +49,8 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
query = f"""SELECT gs.generate_series AS timestamp,
COALESCE(COUNT(DISTINCT processed_sessions.user_id),0) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
LEFT JOIN (SELECT multiIf(s.user_id IS NOT NULL AND s.user_id != '', s.user_id,
s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != '',
LEFT JOIN (SELECT multiIf(isNotNull(s.user_id) AND notEmpty(s.user_id), s.user_id,
isNotNull(s.user_anonymous_id) AND notEmpty(s.user_anonymous_id),
s.user_anonymous_id, toString(s.user_uuid)) AS user_id,
s.datetime AS datetime
{query_part}) AS processed_sessions ON(TRUE)
@ -148,7 +149,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
"type": e.type,
"isEvent": True,
"value": [],
@ -173,7 +174,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
"type": e.type,
"isEvent": True,
"value": [],
@ -253,7 +254,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
FROM (SELECT s.user_id AS user_id {extra_col}
{query_part}
WHERE isNotNull(user_id)
AND user_id != '') AS filtred_sessions
AND notEmpty(user_id)) AS filtred_sessions
{extra_where}
GROUP BY {main_col}
ORDER BY total DESC
@ -277,7 +278,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
@ -330,7 +331,11 @@ def json_condition(table_alias, json_column, json_key, op, values, value_key, ch
extract_func = "JSONExtractFloat" if numeric_type == "float" else "JSONExtractInt"
condition = f"{extract_func}(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
else:
condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
# condition = f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}') {op} %({value_key})s"
condition = get_sub_condition(
col_name=f"JSONExtractString(toString({table_alias}.`{json_column}`), '{json_key}')",
val_name=value_key, operator=op
)
conditions.append(sh.multi_conditions(condition, values, value_key=value_key))
@ -660,7 +665,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
**sh.multi_values(event.source, value_key=s_k),
e_k: event.value[0] if len(event.value) > 0 else event.value}
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -671,24 +677,44 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if schemas.ClickEventExtraOperator.has_value(event.operator):
event_where.append(json_condition(
"main",
"$properties",
"selector", op, event.value, e_k)
# event_where.append(json_condition(
# "main",
# "$properties",
# "selector", op, event.value, e_k)
# )
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"main.`$properties`.selector",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions[-1]["condition"] = event_where[-1]
else:
if is_not:
event_where.append(json_condition(
"sub", "$properties", _column, op, event.value, e_k
))
# event_where.append(json_condition(
# "sub", "$properties", _column, op, event.value, e_k
# ))
event_where.append(
sh.multi_conditions(
get_sub_condition(col_name=f"sub.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"
}
)
events_conditions_not[-1]["condition"] = event_where[-1]
else:
# event_where.append(
# json_condition("main", "$properties", _column, op, event.value, e_k)
# )
event_where.append(
json_condition("main", "$properties", _column, op, event.value, e_k)
sh.multi_conditions(
get_sub_condition(col_name=f"main.`$properties`.{_column}",
val_name=e_k, operator=event.operator),
event.value, value_key=e_k)
)
events_conditions[-1]["condition"] = event_where[-1]
else:
@ -870,12 +896,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = []
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
event_where.append(
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
value_key=s_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
@ -1191,8 +1220,35 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
else:
logging.warning(f"undefined GRAPHQL filter: {f.type}")
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
elif event_type == schemas.EventType.EVENT:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = events.EventType.CLICK.column
event_where.append(f"main.`$event_name`=%({e_k})s AND main.session_id>0")
events_conditions.append({"type": event_where[-1], "condition": ""})
else:
continue
if event.properties is not None and len(event.properties.filters) > 0:
sub_conditions = []
for l, property in enumerate(event.properties.filters):
a_k = f"{e_k}_att_{l}"
full_args = {**full_args,
**sh.multi_values(property.value, value_key=a_k)}
if property.is_predefined:
condition = get_sub_condition(col_name=f"main.{property.name}",
val_name=a_k, operator=property.operator)
else:
condition = get_sub_condition(col_name=f"main.properties.{property.name}",
val_name=a_k, operator=property.operator)
event_where.append(
sh.multi_conditions(condition, property.value, value_key=a_k)
)
sub_conditions.append(event_where[-1])
if len(sub_conditions) > 0:
sub_conditions = (" " + event.properties.operator + " ").join(sub_conditions)
events_conditions[-1]["condition"] += " AND " if len(events_conditions[-1]["condition"]) > 0 else ""
events_conditions[-1]["condition"] += "(" + sub_conditions + ")"
if event_index == 0 or or_events:
event_where += ss_constraints
if is_not:

View file

@ -1,6 +1,5 @@
import ast
import logging
from typing import List, Union
import schemas
from chalicelib.core import events, metadata, projects
@ -219,7 +218,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
}
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,

View file

@ -143,7 +143,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
"type": e.type,
"isEvent": True,
"value": [],
@ -160,7 +160,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
for e in data.events:
if e.type == schemas.EventType.REQUEST_DETAILS:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
extra_conditions[e.operator] = schemas.SessionSearchEventSchema.model_validate({
"type": e.type,
"isEvent": True,
"value": [],
@ -273,7 +273,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,

View file

@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
ORDER BY sort_key {data.order}
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
parameters=full_args)
logging.debug("--------------------")
logging.debug(main_query)
logging.debug("--------------------")
try:
logging.debug("--------------------")
sessions_list = cur.execute(main_query)
logging.debug("--------------------")
except Exception as err:
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
logging.warning(main_query)
@ -262,7 +262,7 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
FROM public.user_favorite_sessions
WHERE user_favorite_sessions.user_id = %(userId)s
) AS favorite_sessions USING (session_id)
WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
WHERE s.project_id = %(id)s AND isNotNull(s.duration) AND s.{col_name} = %(value)s
) AS full_sessions
ORDER BY favorite DESC, issue_score DESC
LIMIT 10

View file

@ -11,9 +11,3 @@ if smtp.has_smtp():
logger.info("valid SMTP configuration found")
else:
logger.info("no SMTP configuration found or SMTP validation failed")
if config("EXP_CH_DRIVER", cast=bool, default=True):
logging.info(">>> Using new CH driver")
from . import ch_client_exp as ch_client
else:
from . import ch_client

View file

@ -1,73 +1,185 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty
import clickhouse_driver
import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config
logger = logging.getLogger(__name__)
_CH_CONFIG = {"host": config("ch_host"),
"user": config("ch_user", default="default"),
"password": config("ch_password", default=""),
"port": config("ch_port_http", cast=int),
"client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)
settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
if config('ch_receive_timeout', cast=int, default=-1) > 0:
logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
extra_args["compression"] = "lz4"
def transform_result(self, original_function):
@wraps(original_function)
def wrapper(*args, **kwargs):
if kwargs.get("parameters"):
if config("LOCAL_DEV", cast=bool, default=False):
logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
else:
logger.debug(
str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
elif len(args) > 0:
if config("LOCAL_DEV", cast=bool, default=False):
logger.debug(args[0])
else:
logger.debug(str.encode(args[0]))
result = original_function(*args, **kwargs)
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
column_names = result.column_names
result = result.result_rows
result = [dict(zip(column_names, row)) for row in result]
return result
return wrapper
class ClickHouseConnectionPool:
def __init__(self, min_size, max_size):
self.min_size = min_size
self.max_size = max_size
self.pool = Queue()
self.lock = threading.Lock()
self.total_connections = 0
# Initialize the pool with min_size connections
for _ in range(self.min_size):
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.pool.put(client)
self.total_connections += 1
def get_connection(self):
try:
# Try to get a connection without blocking
client = self.pool.get_nowait()
return client
except Empty:
with self.lock:
if self.total_connections < self.max_size:
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.total_connections += 1
return client
# If max_size reached, wait until a connection is available
client = self.pool.get()
return client
def release_connection(self, client):
self.pool.put(client)
def close_all(self):
with self.lock:
while not self.pool.empty():
client = self.pool.get()
client.close()
self.total_connections = 0
CH_pool: ClickHouseConnectionPool = None
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0
def make_pool():
if not config('CH_POOL', cast=bool, default=True):
return
global CH_pool
global RETRY
if CH_pool is not None:
try:
CH_pool.close_all()
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)
try:
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
max_size=config("CH_MAXCONN", cast=int, default=8))
if CH_pool is not None:
logger.info("Connection pool created successfully for CH")
except ConnectionError as error:
logger.error("Error while connecting to CH", exc_info=error)
if RETRY < RETRY_MAX:
RETRY += 1
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
time.sleep(RETRY_INTERVAL)
make_pool()
else:
raise error
class ClickHouseClient:
__client = None
def __init__(self, database=None):
extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
extra_args["compression"] = "lz4"
self.__client = clickhouse_driver.Client(host=config("ch_host"),
database=database if database else config("ch_database",
default="default"),
user=config("ch_user", default="default"),
password=config("ch_password", default=""),
port=config("ch_port", cast=int),
settings=settings,
**extra_args) \
if self.__client is None else self.__client
if self.__client is None:
if database is not None or not config('CH_POOL', cast=bool, default=True):
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
database=database if database else config("ch_database",
default="default"),
settings=settings,
**extra_args)
else:
self.__client = CH_pool.get_connection()
self.__client.execute = transform_result(self, self.__client.query)
self.__client.format = self.format
def __enter__(self):
return self
def execute(self, query, parameters=None, **args):
try:
results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
keys = tuple(x for x, y in results[1])
return [dict(zip(keys, i)) for i in results[0]]
except Exception as err:
logger.error("--------- CH EXCEPTION -----------", exc_info=err)
logger.error("--------- CH QUERY EXCEPTION -----------")
logger.error(self.format(query=query, parameters=parameters)
.replace('\n', '\\n')
.replace(' ', ' ')
.replace(' ', ' '))
logger.error("--------------------")
raise err
def insert(self, query, params=None, **args):
return self.__client.execute(query=query, params=params, **args)
def client(self):
return self.__client
def format(self, query, parameters):
if parameters is None:
return query
return self.__client.substitute_params(query, parameters, self.__client.connection.context)
def format(self, query, parameters=None):
if parameters:
ctx = QueryContext(query=query, parameters=parameters)
return ctx.final_query
return query
def __exit__(self, *args):
pass
if config('CH_POOL', cast=bool, default=True):
CH_pool.release_connection(self.__client)
else:
self.__client.close()
async def init():
logger.info(f">CH_POOL:not defined")
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
if config('CH_POOL', cast=bool, default=True):
make_pool()
async def terminate():
pass
global CH_pool
if CH_pool is not None:
try:
CH_pool.close_all()
logger.info("Closed all connexions to CH")
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)

View file

@ -1,178 +0,0 @@
import logging
import threading
import time
from functools import wraps
from queue import Queue, Empty
import clickhouse_connect
from clickhouse_connect.driver.query import QueryContext
from decouple import config
logger = logging.getLogger(__name__)
_CH_CONFIG = {"host": config("ch_host"),
"user": config("ch_user", default="default"),
"password": config("ch_password", default=""),
"port": config("ch_port_http", cast=int),
"client_name": config("APP_NAME", default="PY")}
CH_CONFIG = dict(_CH_CONFIG)
settings = {}
if config('ch_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
if config('ch_receive_timeout', cast=int, default=-1) > 0:
logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
extra_args = {}
if config("CH_COMPRESSION", cast=bool, default=True):
extra_args["compression"] = "lz4"
def transform_result(self, original_function):
@wraps(original_function)
def wrapper(*args, **kwargs):
if kwargs.get("parameters"):
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
elif len(args) > 0:
logger.debug(str.encode(args[0]))
result = original_function(*args, **kwargs)
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
column_names = result.column_names
result = result.result_rows
result = [dict(zip(column_names, row)) for row in result]
return result
return wrapper
class ClickHouseConnectionPool:
def __init__(self, min_size, max_size):
self.min_size = min_size
self.max_size = max_size
self.pool = Queue()
self.lock = threading.Lock()
self.total_connections = 0
# Initialize the pool with min_size connections
for _ in range(self.min_size):
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.pool.put(client)
self.total_connections += 1
def get_connection(self):
try:
# Try to get a connection without blocking
client = self.pool.get_nowait()
return client
except Empty:
with self.lock:
if self.total_connections < self.max_size:
client = clickhouse_connect.get_client(**CH_CONFIG,
database=config("ch_database", default="default"),
settings=settings,
**extra_args)
self.total_connections += 1
return client
# If max_size reached, wait until a connection is available
client = self.pool.get()
return client
def release_connection(self, client):
self.pool.put(client)
def close_all(self):
with self.lock:
while not self.pool.empty():
client = self.pool.get()
client.close()
self.total_connections = 0
CH_pool: ClickHouseConnectionPool = None
RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
RETRY = 0
def make_pool():
if not config('CH_POOL', cast=bool, default=True):
return
global CH_pool
global RETRY
if CH_pool is not None:
try:
CH_pool.close_all()
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)
try:
CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
max_size=config("CH_MAXCONN", cast=int, default=8))
if CH_pool is not None:
logger.info("Connection pool created successfully for CH")
except ConnectionError as error:
logger.error("Error while connecting to CH", exc_info=error)
if RETRY < RETRY_MAX:
RETRY += 1
logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
time.sleep(RETRY_INTERVAL)
make_pool()
else:
raise error
class ClickHouseClient:
__client = None
def __init__(self, database=None):
if self.__client is None:
if database is not None or not config('CH_POOL', cast=bool, default=True):
self.__client = clickhouse_connect.get_client(**CH_CONFIG,
database=database if database else config("ch_database",
default="default"),
settings=settings,
**extra_args)
else:
self.__client = CH_pool.get_connection()
self.__client.execute = transform_result(self, self.__client.query)
self.__client.format = self.format
def __enter__(self):
return self.__client
def format(self, query, parameters=None):
if parameters:
ctx = QueryContext(query=query, parameters=parameters)
return ctx.final_query
return query
def __exit__(self, *args):
if config('CH_POOL', cast=bool, default=True):
CH_pool.release_connection(self.__client)
else:
self.__client.close()
async def init():
logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
if config('CH_POOL', cast=bool, default=True):
make_pool()
async def terminate():
global CH_pool
if CH_pool is not None:
try:
CH_pool.close_all()
logger.info("Closed all connexions to CH")
except Exception as error:
logger.error("Error while closing all connexions to CH", exc_info=error)

View file

@ -1,7 +1,10 @@
import logging
import re
from typing import Union
import schemas
import logging
from chalicelib.utils import sql_helper as sh
from schemas import SearchEventOperator
logger = logging.getLogger(__name__)
@ -66,3 +69,94 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
if event_type not in defs:
raise Exception(f"unsupported EventType:{event_type}")
return defs.get(event_type)
# AI generated
def simplify_clickhouse_type(ch_type: str) -> str:
"""
Simplify a ClickHouse data type name to a broader category like:
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
"""
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
while True:
match = pattern_wrappers.match(ch_type)
if match:
ch_type = match.group(2)
else:
break
# 2) Normalize (lowercase) for easier checks
normalized_type = ch_type.lower()
# 3) Use pattern matching or direct checks for known categories
# (You can adapt this as you see fit for your environment.)
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
return "int"
# Floats: Float32, Float64
if re.match(r'^float(32|64)$', normalized_type):
return "float"
# Decimal: Decimal(P, S)
if normalized_type.startswith("decimal"):
return "decimal"
# Date/DateTime
if normalized_type.startswith("date"):
return "datetime"
if normalized_type.startswith("datetime"):
return "datetime"
# Strings: String, FixedString(N)
if normalized_type.startswith("string"):
return "string"
if normalized_type.startswith("fixedstring"):
return "string"
# UUID
if normalized_type.startswith("uuid"):
return "uuid"
# Enums: Enum8(...) or Enum16(...)
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
return "enum"
# Arrays: Array(T)
if normalized_type.startswith("array"):
return "array"
# Tuples: Tuple(T1, T2, ...)
if normalized_type.startswith("tuple"):
return "tuple"
# Map(K, V)
if normalized_type.startswith("map"):
return "map"
# Nested(...)
if normalized_type.startswith("nested"):
return "nested"
# If we didn't match above, just return the original type in lowercase
return normalized_type
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
"""
Takes a list of ClickHouse types and returns a list of simplified types
by calling `simplify_clickhouse_type` on each.
"""
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
def get_sub_condition(col_name: str, val_name: str,
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]):
if operator == SearchEventOperator.PATTERN:
return f"match({col_name}, %({val_name})s)"
op = sh.get_sql_operator(operator)
return f"{col_name} {op} %({val_name})s"

View file

@ -14,6 +14,9 @@ def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventEx
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
# PATTERN is not used as a SQL operator; this entry only keeps the mapping valid when building conditions
schemas.SearchEventOperator.PATTERN: "regex",
# Selector operators:
schemas.ClickEventExtraOperator.IS: "=",
schemas.ClickEventExtraOperator.IS_NOT: "!=",
@ -72,4 +75,3 @@ def single_value(values):
if isinstance(v, Enum):
values[i] = v.value
return values

View file

@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
EXP_METRICS=true
EXP_SESSIONS_SEARCH=true

View file

@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2
fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6

View file

@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2
fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6

View file

@ -0,0 +1,55 @@
from typing import Annotated
from fastapi import Body, Depends, Query
import schemas
from chalicelib.core import metadata
from chalicelib.core.product_analytics import events, properties
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/filters', tags=["product_analytics"])
def get_all_filters(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": {
"events": events.get_events(project_id=projectId, page=filter_query),
"filters": properties.get_all_properties(project_id=projectId, page=filter_query),
"metadata": metadata.get_for_filters(project_id=projectId)
}
}
@app.get('/{projectId}/events/names', tags=["product_analytics"])
def get_all_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.get_events(project_id=projectId, page=filter_query)}
@app.get('/{projectId}/properties/search', tags=["product_analytics"])
def get_event_properties(projectId: int, event_name: str = None,
context: schemas.CurrentContext = Depends(OR_context)):
if not event_name or len(event_name) == 0:
return {"data": []}
return {"data": properties.get_event_properties(project_id=projectId, event_name=event_name)}
@app.post('/{projectId}/events/search', tags=["product_analytics"])
def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.search_events(project_id=projectId, data=data)}
@app.get('/{projectId}/lexicon/events', tags=["product_analytics", "lexicon"])
def get_all_lexicon_events(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": events.get_lexicon(project_id=projectId, page=filter_query)}
@app.get('/{projectId}/lexicon/properties', tags=["product_analytics", "lexicon"])
def get_all_lexicon_properties(projectId: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": properties.get_lexicon(project_id=projectId, page=filter_query)}

View file

@ -1,15 +0,0 @@
import schemas
from chalicelib.core.metrics import product_anaytics2
from fastapi import Depends
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.post('/{projectId}/events/search', tags=["dashboard"])
def search_events(projectId: int,
# data: schemas.CreateDashboardSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return product_anaytics2.search_events(project_id=projectId, data={})

View file

@ -1,10 +1,12 @@
from fastapi import Body, Depends
from typing import Annotated
from fastapi import Body, Depends, Query
import schemas
from chalicelib.core.usability_testing import service
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
from or_dependencies import OR_context
from routers.base import get_routers
from schemas import schemas
public_app, app, app_apikey = get_routers()
tags = ["usability-tests"]
@ -77,9 +79,8 @@ async def update_ut_test(projectId: int, test_id: int, test_update: UTTestUpdate
@app.get('/{projectId}/usability-tests/{test_id}/sessions', tags=tags)
async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int = 10,
live: bool = False,
user_id: str = None):
async def get_sessions(projectId: int, test_id: int, filter_query: Annotated[schemas.PaginatedSchema, Query()],
live: bool = False, user_id: str = None):
"""
Get sessions related to a specific UT test.
@ -88,20 +89,21 @@ async def get_sessions(projectId: int, test_id: int, page: int = 1, limit: int =
"""
if live:
return service.ut_tests_sessions_live(projectId, test_id, page, limit)
return service.ut_tests_sessions_live(projectId, test_id, filter_query.page, filter_query.limit)
else:
return service.ut_tests_sessions(projectId, test_id, page, limit, user_id, live)
return service.ut_tests_sessions(projectId, test_id, filter_query.page, filter_query.limit, user_id, live)
@app.get('/{projectId}/usability-tests/{test_id}/responses/{task_id}', tags=tags)
async def get_responses(projectId: int, test_id: int, task_id: int, page: int = 1, limit: int = 10, query: str = None):
async def get_responses(projectId: int, test_id: int, task_id: int,
filter_query: Annotated[schemas.PaginatedSchema, Query()], query: str = None):
"""
Get responses related to a specific UT test.
- **project_id**: The unique identifier of the project.
- **test_id**: The unique identifier of the UT test.
"""
return service.get_responses(test_id, task_id, page, limit, query)
return service.get_responses(test_id, task_id, filter_query.page, filter_query.limit, query)
@app.get('/{projectId}/usability-tests/{test_id}/statistics', tags=tags)

View file

@ -1,2 +1,4 @@
from .schemas import *
from .product_analytics import *
from . import overrides as _overrides
from .schemas import _PaginatedSchema as PaginatedSchema

View file

@ -0,0 +1,22 @@
from typing import Optional, List, Literal, Union, Annotated
from pydantic import Field
from .overrides import BaseModel
from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
_PaginatedSchema, PropertyFilterSchema
class EventSearchSchema(BaseModel):
is_event: Literal[True] = True
name: str = Field(...)
properties: Optional[EventPropertiesSchema] = Field(default=None)
ProductAnalyticsGroupedFilter = Annotated[Union[EventSearchSchema, PropertyFilterSchema], \
Field(discriminator='is_event')]
class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
filters: List[ProductAnalyticsGroupedFilter] = Field(...)
sort: str = Field(default="startTs")
order: SortOrderType = Field(default=SortOrderType.DESC)
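A minimal construction sketch for the payload schema above (not part of the changeset). It assumes the product_analytics schemas are re-exported by the schemas package (as the __init__ change below indicates), that _TimedSchema's time-range fields have defaults, and that SearchEventOperator.IS is valid for string properties; event and property values are illustrative.

import schemas

clicked = schemas.EventSearchSchema(          # is_event=True branch of the discriminated union
    name="button_click",
    properties=schemas.EventPropertiesSchema(
        operator="and",
        filters=[schemas.PropertyFilterSchema(name="$browser",
                                              operator=schemas.SearchEventOperator.IS,
                                              value=["Chrome"])]))
from_germany = schemas.PropertyFilterSchema(  # is_event=False branch
    name="$country", operator=schemas.SearchEventOperator.IS, value=["DE"])

search = schemas.EventsSearchPayloadSchema(filters=[clicked, from_germany])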

View file

@ -404,6 +404,7 @@ class EventType(str, Enum):
REQUEST_MOBILE = "requestMobile"
ERROR_MOBILE = "errorMobile"
SWIPE_MOBILE = "swipeMobile"
EVENT = "event"
class PerformanceEventType(str, Enum):
@ -464,6 +465,7 @@ class SearchEventOperator(str, Enum):
NOT_CONTAINS = "notContains"
STARTS_WITH = "startsWith"
ENDS_WITH = "endsWith"
PATTERN = "regex"
class ClickEventExtraOperator(str, Enum):
@ -545,7 +547,66 @@ class RequestGraphqlFilterSchema(BaseModel):
return values
class SessionSearchEventSchema2(BaseModel):
class EventPredefinedPropertyType(str, Enum):
TIME = "$time"
SOURCE = "$source"
DURATION_S = "$duration_s"
DESCRIPTION = "description"
AUTO_CAPTURED = "$auto_captured"
SDK_EDITION = "$sdk_edition"
SDK_VERSION = "$sdk_version"
DEVICE_ID = "$device_id"
OS = "$os"
OS_VERSION = "$os_version"
BROWSER = "$browser"
BROWSER_VERSION = "$browser_version"
DEVICE = "$device"
SCREEN_HEIGHT = "$screen_height"
SCREEN_WIDTH = "$screen_width"
CURRENT_URL = "$current_url"
INITIAL_REFERRER = "$initial_referrer"
REFERRING_DOMAIN = "$referring_domain"
REFERRER = "$referrer"
INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
SEARCH_ENGINE = "$search_engine"
SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
UTM_SOURCE = "utm_source"
UTM_MEDIUM = "utm_medium"
UTM_CAMPAIGN = "utm_campaign"
COUNTRY = "$country"
STATE = "$state"
CITY = "$city"
ISSUE_TYPE = "issue_type"
TAGS = "$tags"
IMPORT = "$import"
class PropertyFilterSchema(BaseModel):
is_event: Literal[False] = False
name: Union[EventPredefinedPropertyType, str] = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
value: List[Union[int, str]] = Field(...)
# property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
@computed_field
@property
def is_predefined(self) -> bool:
return EventPredefinedPropertyType.has_value(self.name)
@model_validator(mode="after")
def transform_name(self):
if isinstance(self.name, Enum):
self.name = self.name.value
return self
class EventPropertiesSchema(BaseModel):
operator: Literal["and", "or"] = Field(...)
filters: List[PropertyFilterSchema] = Field(...)
class SessionSearchEventSchema(BaseModel):
is_event: Literal[True] = True
value: List[Union[str, int]] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...)
@ -553,6 +614,7 @@ class SessionSearchEventSchema2(BaseModel):
source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
sourceOperator: Optional[MathOperator] = Field(default=None)
filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
properties: Optional[EventPropertiesSchema] = Field(default=None)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
@ -660,12 +722,12 @@ def add_missing_is_event(values: dict):
# this type is created to allow mixing events&filters and specifying a discriminator
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema],
Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]
class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
events: List[SessionSearchEventSchema2] = Field(default_factory=list, doc_hidden=True)
events: List[SessionSearchEventSchema] = Field(default_factory=list, doc_hidden=True)
filters: List[GroupedFilterType] = Field(default_factory=list)
sort: str = Field(default="startTs")
order: SortOrderType = Field(default=SortOrderType.DESC)
@ -690,6 +752,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
def add_missing_attributes(cls, values):
# in case isEvent is wrong:
for f in values.get("filters") or []:
if f.get("type") is None:
continue
if EventType.has_value(f["type"]) and not f.get("isEvent"):
f["isEvent"] = True
elif FilterType.has_value(f["type"]) and f.get("isEvent"):
@ -715,6 +779,15 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
f["value"] = vals
return values
@model_validator(mode="after")
def check_pa_event_filter(self):
for v in self.filters + self.events:
if v.type == EventType.EVENT:
assert v.operator in (SearchEventOperator.IS, MathOperator.EQUAL), \
"operator must be {SearchEventOperator.IS} or {MathOperator.EQUAL} for EVENT type"
assert len(v.value) == 1, "value must have 1 single value for EVENT type"
return self
@model_validator(mode="after")
def split_filters_events(self):
n_filters = []
@ -1404,7 +1477,7 @@ class MetricSearchSchema(_PaginatedSchema):
mine_only: bool = Field(default=False)
class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
class _HeatMapSearchEventRaw(SessionSearchEventSchema):
type: Literal[EventType.LOCATION] = Field(...)
@ -1529,3 +1602,30 @@ class TagCreate(TagUpdate):
class ScopeSchema(BaseModel):
scope: int = Field(default=1, ge=1, le=2)
class SessionModel(BaseModel):
duration: int
errorsCount: int
eventsCount: int
favorite: bool = Field(default=False)
issueScore: int
issueTypes: List[IssueType] = Field(default=[])
metadata: dict = Field(default={})
pagesCount: int
platform: str
projectId: int
sessionId: str
startTs: int
timezone: Optional[str]
userAnonymousId: Optional[str]
userBrowser: str
userCity: str
userCountry: str
userDevice: Optional[str]
userDeviceType: str
userId: Optional[str]
userOs: str
userState: str
userUuid: str
viewed: bool = Field(default=False)
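To make the new EVENT-type constraint concrete, a small construction sketch (not part of the changeset). It assumes SessionSearchEventSchema keeps the operator field from the renamed SessionSearchEventSchema2; event and property values are illustrative.

evt = SessionSearchEventSchema(
    type=EventType.EVENT,
    operator=SearchEventOperator.IS,   # check_pa_event_filter also accepts MathOperator.EQUAL
    value=["button_click"],            # exactly one value is required for EVENT type
    properties=EventPropertiesSchema(
        operator="and",
        filters=[PropertyFilterSchema(name=EventPredefinedPropertyType.BROWSER,
                                      operator=SearchEventOperator.IS,
                                      value=["Chrome"])]))
# Inside SessionsSearchPayloadSchema, check_pa_event_filter rejects any other operator
# or a multi-valued "value" for EventType.EVENT entries.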

61
assist-server/build.sh Normal file
View file

@ -0,0 +1,61 @@
#!/bin/bash
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
ARCH=${ARCH:-amd64}
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-$git_sha}
check_prereq() {
which docker || {
echo "Docker not installed, please install docker."
exit 1
}
}
source ../scripts/lib/_docker.sh
[[ $PATCH -eq 1 ]] && {
image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
image_tag="${image_tag}-ee"
}
update_helm_release() {
chart=$1
HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
# Update the chart version
sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
# Update image tags
sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
# Commit the changes
git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
git commit -m "chore(helm): Updating $chart image release"
}
function build_api() {
destination="_assist-server_ee"
[[ -d ../${destination} ]] && {
echo "Removing previous build cache"
rm -rf ../${destination}
}
cp -R ../assist-server ../${destination}
cd ../${destination} || exit 1
cp -rf ../ee/assist-server/* ./
docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist-server:${image_tag} .
cd ../assist-server || exit 1
rm -rf ../${destination}
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
docker tag ${DOCKER_REPO:-'local'}/assist-server:${image_tag} ${DOCKER_REPO:-'local'}/assist-server:latest
docker push ${DOCKER_REPO:-'local'}/assist-server:latest
}
[[ $SIGN_IMAGE -eq 1 ]] && {
cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist-server:${image_tag}
}
echo "build completed for assist-server"
}
check_prereq
build_api $1
if [[ $PATCH -eq 1 ]]; then
update_helm_release assist-server
fi

View file

@ -2,44 +2,71 @@ package main
import (
"context"
"os"
"os/signal"
"syscall"
analyticsConfig "openreplay/backend/internal/config/analytics"
"openreplay/backend/pkg/analytics"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/metrics"
"openreplay/backend/pkg/metrics/database"
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server"
"openreplay/backend/pkg/server/api"
)
func main() {
ctx := context.Background()
log := logger.New()
cfg := analyticsConfig.New(log)
// Observability
webMetrics := web.New("analytics")
dbMetrics := database.New("analytics")
metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
log.Info(ctx, "Cacher service started")
pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
if err != nil {
log.Fatal(ctx, "can't init postgres connection: %s", err)
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
for {
select {
case sig := <-sigchan:
log.Error(ctx, "Caught signal %v: terminating", sig)
os.Exit(0)
}
}
defer pgConn.Close()
builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
if err != nil {
log.Fatal(ctx, "can't init services: %s", err)
}
router, err := api.NewRouter(&cfg.HTTP, log)
if err != nil {
log.Fatal(ctx, "failed while creating router: %s", err)
}
router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
server.Run(ctx, log, &cfg.HTTP, router)
}
//
//import (
// "context"
//
// analyticsConfig "openreplay/backend/internal/config/analytics"
// "openreplay/backend/pkg/analytics"
// "openreplay/backend/pkg/db/postgres/pool"
// "openreplay/backend/pkg/logger"
// "openreplay/backend/pkg/metrics"
// "openreplay/backend/pkg/metrics/database"
// "openreplay/backend/pkg/metrics/web"
// "openreplay/backend/pkg/server"
// "openreplay/backend/pkg/server/api"
//)
//
//func main() {
// ctx := context.Background()
// log := logger.New()
// cfg := analyticsConfig.New(log)
// // Observability
// webMetrics := web.New("analytics")
// dbMetrics := database.New("analytics")
// metrics.New(log, append(webMetrics.List(), dbMetrics.List()...))
//
// pgConn, err := pool.New(dbMetrics, cfg.Postgres.String())
// if err != nil {
// log.Fatal(ctx, "can't init postgres connection: %s", err)
// }
// defer pgConn.Close()
//
// builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, dbMetrics, pgConn)
// if err != nil {
// log.Fatal(ctx, "can't init services: %s", err)
// }
//
// router, err := api.NewRouter(&cfg.HTTP, log)
// if err != nil {
// log.Fatal(ctx, "failed while creating router: %s", err)
// }
// router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI)
// router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
//
// server.Run(ctx, log, &cfg.HTTP, router)
//}

View file

@ -111,12 +111,12 @@ var batches = map[string]string{
"pages": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"clicks": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"inputs": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"errors": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", error_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"performance": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"requests": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$duration_s", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"graphql": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issuesEvents": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", issue_type, issue_id, "$current_url", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
"mobile_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
"mobile_custom": `INSERT INTO product_analytics.events (session_id, project_id, event_id, "$event_name", created_at, "$time", distinct_id, "$auto_captured", "$device", "$os_version", "$properties") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
@ -309,6 +309,7 @@ func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *mes
session.UserOSVersion,
"mouse_thrashing",
issueID,
cropString(msg.Url),
jsonString,
); err != nil {
c.checkError("issuesEvents", err)
@ -365,6 +366,7 @@ func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.Iss
session.UserOSVersion,
msg.Type,
issueID,
cropString(msg.Url),
jsonString,
); err != nil {
c.checkError("issuesEvents", err)
@ -552,6 +554,7 @@ func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *type
session.Platform,
session.UserOSVersion,
msgID,
cropString(msg.Url),
jsonString,
); err != nil {
c.checkError("errors", err)

View file

@ -84,7 +84,10 @@ func (p *poolImpl) Begin() (*Tx, error) {
tx, err := p.conn.Begin(context.Background())
p.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "begin", "")
p.metrics.IncreaseTotalRequests("begin", "")
return &Tx{tx, p.metrics}, err
return &Tx{
origTx: tx,
metrics: p.metrics,
}, err
}
func (p *poolImpl) Close() {
@ -94,13 +97,13 @@ func (p *poolImpl) Close() {
// TX - start
type Tx struct {
pgx.Tx
origTx pgx.Tx
metrics database.Database
}
func (tx *Tx) TxExec(sql string, args ...interface{}) error {
start := time.Now()
_, err := tx.Exec(context.Background(), sql, args...)
_, err := tx.origTx.Exec(context.Background(), sql, args...)
method, table := methodName(sql)
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
tx.metrics.IncreaseTotalRequests(method, table)
@ -109,7 +112,7 @@ func (tx *Tx) TxExec(sql string, args ...interface{}) error {
func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
start := time.Now()
res := tx.QueryRow(context.Background(), sql, args...)
res := tx.origTx.QueryRow(context.Background(), sql, args...)
method, table := methodName(sql)
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), method, table)
tx.metrics.IncreaseTotalRequests(method, table)
@ -118,7 +121,7 @@ func (tx *Tx) TxQueryRow(sql string, args ...interface{}) pgx.Row {
func (tx *Tx) TxRollback() error {
start := time.Now()
err := tx.Rollback(context.Background())
err := tx.origTx.Rollback(context.Background())
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "rollback", "")
tx.metrics.IncreaseTotalRequests("rollback", "")
return err
@ -126,7 +129,7 @@ func (tx *Tx) TxRollback() error {
func (tx *Tx) TxCommit() error {
start := time.Now()
err := tx.Commit(context.Background())
err := tx.origTx.Commit(context.Background())
tx.metrics.RecordRequestDuration(float64(time.Now().Sub(start).Milliseconds()), "commit", "")
tx.metrics.IncreaseTotalRequests("commit", "")
return err

View file

@ -5,10 +5,11 @@ import (
"encoding/hex"
"encoding/json"
"fmt"
"github.com/google/uuid"
"hash/fnv"
"strconv"
"github.com/google/uuid"
. "openreplay/backend/pkg/messages"
)
@ -23,41 +24,7 @@ type ErrorEvent struct {
Payload string
Tags map[string]*string
OriginType int
}
func unquote(s string) string {
if s[0] == '"' {
return s[1 : len(s)-1]
}
return s
}
func parseTags(tagsJSON string) (tags map[string]*string, err error) {
if len(tagsJSON) == 0 {
return nil, fmt.Errorf("empty tags")
}
if tagsJSON[0] == '[' {
var tagsArr []json.RawMessage
if err = json.Unmarshal([]byte(tagsJSON), &tagsArr); err != nil {
return
}
tags = make(map[string]*string)
for _, keyBts := range tagsArr {
tags[unquote(string(keyBts))] = nil
}
} else if tagsJSON[0] == '{' {
var tagsObj map[string]json.RawMessage
if err = json.Unmarshal([]byte(tagsJSON), &tagsObj); err != nil {
return
}
tags = make(map[string]*string)
for key, valBts := range tagsObj {
val := unquote(string(valBts))
tags[key] = &val
}
}
return
Url string
}
func WrapJSException(m *JSException) (*ErrorEvent, error) {
@ -69,6 +36,7 @@ func WrapJSException(m *JSException) (*ErrorEvent, error) {
Message: m.Message,
Payload: m.Payload,
OriginType: m.TypeID(),
Url: m.Url,
}, nil
}
@ -81,6 +49,7 @@ func WrapIntegrationEvent(m *IntegrationEvent) *ErrorEvent {
Message: m.Message,
Payload: m.Payload,
OriginType: m.TypeID(),
Url: m.Url,
}
}

View file

@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add tracker version to context
r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Handler's logic
if req.ProjectKey == nil {
@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add projectID to context
r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))
// Validate tracker version
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Check if the project supports mobile sessions
if !p.IsWeb() {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)

View file

@ -29,7 +29,7 @@ type Task struct {
Duration int
Status string
Path string
tx pool.Tx
tx *pool.Tx
}
func (t *Task) HasToTrim() bool {
@ -65,7 +65,7 @@ func (t *tasksImpl) Get() (task *Task, err error) {
}
}()
task = &Task{tx: pool.Tx{Tx: tx}}
task = &Task{tx: tx}
sql := `SELECT spot_id, crop, duration FROM spots.tasks WHERE status = 'pending' ORDER BY added_time FOR UPDATE SKIP LOCKED LIMIT 1`
err = tx.TxQueryRow(sql).Scan(&task.SpotID, &task.Crop, &task.Duration)
if err != nil {

View file

@ -52,6 +52,7 @@ func NewTranscoder(cfg *spot.Config, log logger.Logger, objStorage objectstorage
tasks: NewTasks(conn),
streams: NewStreams(log, conn, objStorage),
spots: spots,
metrics: metrics,
}
tnsc.prepareWorkers = workers.NewPool(2, 4, tnsc.prepare)
tnsc.transcodeWorkers = workers.NewPool(2, 4, tnsc.transcode)

View file

@ -8,7 +8,6 @@ ignore:
- "**/*/build/**"
- "**/*/.test.*"
- "**/*/version.ts"
review:
poem: false
review_status: false
collapse_walkthrough: true
comment:
layout: "condensed_header, condensed_files, condensed_footer"
hide_project_coverage: TRUE

6
ee/api/.gitignore vendored
View file

@ -223,10 +223,14 @@ Pipfile.lock
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps
/chalicelib/core/tags.py
/chalicelib/core/product_analytics
/chalicelib/saml
/chalicelib/utils/__init__.py
/chalicelib/utils/args_transformer.py
@ -289,3 +293,5 @@ Pipfile.lock
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_details.py
/chalicelib/utils/contextual_validators.py
/routers/subs/product_analytics.py
/schemas/product_analytics.py

View file

@ -6,25 +6,23 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.12"
boto3 = "==1.37.21"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.6"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
elasticsearch = "==8.17.2"
jira = "==3.8.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
cachetools = "==5.5.2"
fastapi = "==0.115.12"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.6"}
apscheduler = "==3.11.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.20"
redis = "==5.2.1"
azure-storage-blob = "==12.24.1"
azure-storage-blob = "==12.25.0"
[dev-packages]

View file

@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
from crons import core_crons, ee_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers import ee
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
from routers.subs import v1_api_ee
if config("ENABLE_SSO", cast=bool, default=True):
@ -150,9 +150,9 @@ app.include_router(spot.public_app)
app.include_router(spot.app)
app.include_router(spot.app_apikey)
app.include_router(product_anaytics.public_app)
app.include_router(product_anaytics.app)
app.include_router(product_anaytics.app_apikey)
app.include_router(product_analytics.public_app, prefix="/ap")
app.include_router(product_analytics.app, prefix="/ap")
app.include_router(product_analytics.app_apikey, prefix="/ap")
if config("ENABLE_SSO", cast=bool, default=True):
app.include_router(saml.public_app)

View file

@ -1,17 +0,0 @@
import logging
from decouple import config
logger = logging.getLogger(__name__)
from . import sessions_pg
from . import sessions_pg as sessions_legacy
from . import sessions_ch
from . import sessions_search as sessions_search_legacy
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
logger.info(">>> Using experimental sessions search")
from . import sessions_ch as sessions
from . import sessions_search_exp as sessions_search
else:
from . import sessions_pg as sessions
from . import sessions_search as sessions_search

View file

@ -44,11 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/core/product_analytics
rm -rf ./chalicelib/saml
rm -rf ./chalicelib/utils/__init__.py
rm -rf ./chalicelib/utils/args_transformer.py
@ -109,3 +113,5 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_details.py
rm -rf ./chalicelib/utils/contextual_validators.py
rm -rf ./routers/subs/product_analytics.py
rm -rf ./schemas/product_analytics.py

View file

@ -1,19 +1,18 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2
fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
python-decouple==3.8
pydantic[email]==2.10.6
apscheduler==3.11.0
azure-storage-blob==12.24.1
azure-storage-blob==12.25.0

View file

@ -1,19 +1,18 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2
fastapi==0.115.8
fastapi==0.115.12
python-decouple==3.8
pydantic[email]==2.10.6
apscheduler==3.11.0
redis==5.2.1
azure-storage-blob==12.24.1
azure-storage-blob==12.25.0

View file

@ -1,16 +1,15 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.12
boto3==1.37.21
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
clickhouse-driver[lz4]==0.2.9
psycopg[pool,binary]==3.2.6
clickhouse-connect==0.8.15
elasticsearch==8.17.1
elasticsearch==8.17.2
jira==3.8.0
cachetools==5.5.1
cachetools==5.5.2
fastapi==0.115.8
fastapi==0.115.12
uvicorn[standard]==0.34.0
gunicorn==23.0.0
python-decouple==3.8
@ -19,10 +18,9 @@ apscheduler==3.11.0
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
python3-saml==1.16.0
--no-binary=lxml
python-multipart==0.0.20
redis==5.2.1
#confluent-kafka==2.1.0
azure-storage-blob==12.24.1
azure-storage-blob==12.25.0

View file

@ -1,4 +1,5 @@
from .schemas import *
from .schemas_ee import *
from .assist_stats_schema import *
from .product_analytics import *
from . import overrides as _overrides

View file

@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator
from chalicelib.utils.TimeUTC import TimeUTC
from . import schemas
from .overrides import BaseModel, Enum, ORUnion
from .overrides import BaseModel, Enum
from .transformers_validators import remove_whitespace
@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
return values
class SessionModel(BaseModel):
duration: int
errorsCount: int
eventsCount: int
favorite: bool = Field(default=False)
issueScore: int
issueTypes: List[schemas.IssueType] = Field(default=[])
metadata: dict = Field(default={})
pagesCount: int
platform: str
projectId: int
sessionId: str
startTs: int
timezone: Optional[str]
userAnonymousId: Optional[str]
userBrowser: str
userCity: str
userCountry: str
userDevice: Optional[str]
userDeviceType: str
userId: Optional[str]
userOs: str
userState: str
userUuid: str
viewed: bool = Field(default=False)
class AssistRecordUpdatePayloadSchema(BaseModel):
name: str = Field(..., min_length=1)
_transform_name = field_validator('name', mode="before")(remove_whitespace)

5
ee/assist-server/.gitignore vendored Normal file
View file

@ -0,0 +1,5 @@
.idea
node_modules
npm-debug.log
.cache
*.mmdb

View file

@ -0,0 +1,24 @@
ARG ARCH=amd64
FROM --platform=linux/$ARCH node:23-alpine
LABEL Maintainer="Zavorotynskiy Alexander <zavorotynskiy@pm.me>"
RUN apk add --no-cache tini git libc6-compat
ARG envarg
ENV ENTERPRISE_BUILD=${envarg} \
MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \
PRIVATE_ENDPOINTS=false \
LISTEN_PORT=9001 \
ERROR=1 \
NODE_ENV=production
WORKDIR /work
COPY package.json .
COPY package-lock.json .
RUN npm install
COPY . .
RUN adduser -u 1001 openreplay -D
USER 1001
ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-City.mmdb $MAXMINDDB_FILE
ENTRYPOINT ["/sbin/tini", "--"]
CMD npm start

View file

@ -0,0 +1,168 @@
const jwt = require('jsonwebtoken');
const uaParser = require('ua-parser-js');
const {geoip} = require('./geoIP');
const {logger} = require('./logger');
let PROJECT_KEY_LENGTH = parseInt(process.env.PROJECT_KEY_LENGTH) || 20;
const IDENTITIES = {agent: 'agent', session: 'session'};
const EVENTS_DEFINITION = {
listen: {
UPDATE_EVENT: "UPDATE_SESSION", // tab become active/inactive, page title change, changed session object (rare case), call start/end
CONNECT_ERROR: "connect_error",
CONNECT_FAILED: "connect_failed",
ERROR: "error"
},
//The following list of events will be only emitted by the server
server: {
UPDATE_SESSION: "SERVER_UPDATE_SESSION"
}
};
EVENTS_DEFINITION.emit = {
NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
};
const BASE_sessionInfo = {
"pageTitle": "Page",
"active": false,
"live": true,
"sessionID": "0",
"metadata": {},
"userID": "",
"userUUID": "",
"projectKey": "",
"revID": "",
"timestamp": 0,
"trackerVersion": "",
"isSnippet": true,
"userOs": "",
"userBrowser": "",
"userBrowserVersion": "",
"userDevice": "",
"userDeviceType": "",
"userCountry": "",
"userState": "",
"userCity": "",
"projectId": 0
};
const extractPeerId = (peerId) => {
const parts = peerId.split("-");
if (parts.length < 2 || parts.length > 3) {
logger.debug(`Invalid peerId format: ${peerId}`);
return {};
}
if (PROJECT_KEY_LENGTH > 0 && parts[0].length !== PROJECT_KEY_LENGTH) {
logger.debug(`Invalid project key length in peerId: ${peerId}`);
return {};
}
const [projectKey, sessionId, tabId = generateRandomTabId()] = parts;
return { projectKey, sessionId, tabId };
};
const generateRandomTabId = () => (Math.random() + 1).toString(36).substring(2);
function processPeerInfo(socket) {
socket._connectedAt = new Date();
const { projectKey, sessionId, tabId } = extractPeerId(socket.handshake.query.peerId || "");
Object.assign(socket.handshake.query, {
roomId: projectKey && sessionId ? `${projectKey}-${sessionId}` : null,
projectKey,
sessId: sessionId,
tabId
});
logger.debug(`Connection details: projectKey:${projectKey}, sessionId:${sessionId}, tabId:${tabId}, roomId:${socket.handshake.query.roomId}`);
}
/**
* Extracts session information and populates the socket with it.
* @param {Socket} socket - the connecting socket to enrich
* */
const extractSessionInfo = function (socket) {
if (socket.handshake.query.sessionInfo !== undefined) {
logger.debug(`received headers: ${socket.handshake.headers}`);
socket.handshake.query.sessionInfo = JSON.parse(socket.handshake.query.sessionInfo);
socket.handshake.query.sessionInfo = {...BASE_sessionInfo, ...socket.handshake.query.sessionInfo};
let ua = uaParser(socket.handshake.headers['user-agent']);
socket.handshake.query.sessionInfo.userOs = ua.os.name || null;
socket.handshake.query.sessionInfo.userBrowser = ua.browser.name || null;
socket.handshake.query.sessionInfo.userBrowserVersion = ua.browser.version || null;
socket.handshake.query.sessionInfo.userDevice = ua.device.model || null;
socket.handshake.query.sessionInfo.userDeviceType = ua.device.type || 'desktop';
socket.handshake.query.sessionInfo.userCountry = null;
socket.handshake.query.sessionInfo.userState = null;
socket.handshake.query.sessionInfo.userCity = null;
if (geoip() !== null) {
logger.debug(`looking for location of ${socket.handshake.headers['x-forwarded-for'] || socket.handshake.address}`);
try {
let ip = socket.handshake.headers['x-forwarded-for'] || socket.handshake.address;
ip = ip.split(",")[0];
let info = geoip().city(ip);
socket.handshake.query.sessionInfo.userCountry = info.country.isoCode;
socket.handshake.query.sessionInfo.userCity = info.city.names.en;
socket.handshake.query.sessionInfo.userState = info.subdivisions.length > 0 ? info.subdivisions[0].names.en : null;
} catch (e) {
logger.debug(`geoip-country failed: ${e}`);
}
}
}
}
function errorHandler(listenerName, error) {
logger.error(`Error detected from ${listenerName}\n${error}`);
}
const JWT_TOKEN_PREFIX = "Bearer ";
function check(socket, next) {
if (socket.handshake.query.identity === IDENTITIES.session) {
return next();
}
if (socket.handshake.query.peerId && socket.handshake.auth && socket.handshake.auth.token) {
let token = socket.handshake.auth.token;
if (token.startsWith(JWT_TOKEN_PREFIX)) {
token = token.substring(JWT_TOKEN_PREFIX.length);
}
jwt.verify(token, process.env.ASSIST_JWT_SECRET, (err, decoded) => {
logger.debug(`JWT payload: ${decoded}`);
if (err) {
logger.debug(err);
return next(new Error('Authentication error'));
}
const {projectKey, sessionId} = extractPeerId(socket.handshake.query.peerId);
if (!projectKey || !sessionId) {
logger.debug(`Missing attribute: projectKey:${projectKey}, sessionId:${sessionId}`);
return next(new Error('Authentication error'));
}
if (String(projectKey) !== String(decoded.projectKey) || String(sessionId) !== String(decoded.sessionId)) {
logger.debug(`Trying to access projectKey:${projectKey} instead of ${decoded.projectKey} or
to sessionId:${sessionId} instead of ${decoded.sessionId}`);
return next(new Error('Authorization error'));
}
socket.decoded = decoded;
return next();
});
} else {
logger.debug(`something missing in handshake: ${socket.handshake}`);
return next(new Error('Authentication error'));
}
}
module.exports = {
processPeerInfo,
extractPeerId,
extractSessionInfo,
EVENTS_DEFINITION,
IDENTITIES,
errorHandler,
authorizer: {check}
};

View file

@ -0,0 +1,109 @@
const {logger} = require('./logger');
const {createClient} = require("redis");
const crypto = require("crypto");
let redisClient;
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
redisClient = createClient({url: REDIS_URL});
redisClient.on("error", (error) => logger.error(`Redis cache error : ${error}`));
void redisClient.connect();
function generateNodeID() {
const buffer = crypto.randomBytes(8);
return "node_"+buffer.readBigUInt64BE(0).toString();
}
const PING_INTERVAL = parseInt(process.env.PING_INTERVAL_SECONDS) || 25;
const CACHE_REFRESH_INTERVAL = parseInt(process.env.CACHE_REFRESH_INTERVAL_SECONDS) || 10;
const pingInterval = PING_INTERVAL + PING_INTERVAL/2;
const cacheRefreshInterval = CACHE_REFRESH_INTERVAL + CACHE_REFRESH_INTERVAL/2;
const cacheRefreshIntervalMs = CACHE_REFRESH_INTERVAL * 1000;
let lastCacheUpdateTime = 0;
let cacheRefresher = null;
const nodeID = process.env.HOSTNAME || generateNodeID();
const addSessionToCache = async function (sessionID, sessionData) {
try {
await redisClient.set(`active_sessions:${sessionID}`, JSON.stringify(sessionData), {EX: pingInterval}); // node-redis v4 takes the TTL via an options object
logger.debug(`Session ${sessionID} stored in Redis`);
} catch (error) {
logger.error(error);
}
}
const renewSession = async function (sessionID){
try {
await redisClient.expire(`active_sessions:${sessionID}`, pingInterval);
logger.debug(`Session ${sessionID} renewed in Redis`);
} catch (error) {
logger.error(error);
}
}
const getSessionFromCache = async function (sessionID) {
try {
const sessionData = await redisClient.get(`active_sessions:${sessionID}`);
if (sessionData) {
logger.debug(`Session ${sessionID} retrieved from Redis`);
return JSON.parse(sessionData);
}
return null;
} catch (error) {
logger.error(error);
return null;
}
}
const removeSessionFromCache = async function (sessionID) {
try {
await redisClient.del(`active_sessions:${sessionID}`);
logger.debug(`Session ${sessionID} removed from Redis`);
} catch (error) {
logger.error(error);
}
}
const setNodeSessions = async function (nodeID, sessionIDs) {
try {
await redisClient.set(`node:${nodeID}:sessions`, JSON.stringify(sessionIDs), {EX: cacheRefreshInterval}); // node-redis v4 takes the TTL via an options object
logger.debug(`Node ${nodeID} sessions stored in Redis`);
} catch (error) {
logger.error(error);
}
}
function startCacheRefresher(io) {
if (cacheRefresher) clearInterval(cacheRefresher);
cacheRefresher = setInterval(async () => {
const now = Date.now();
if (now - lastCacheUpdateTime < cacheRefreshIntervalMs) {
return;
}
logger.debug('Background refresh triggered');
try {
const startTime = performance.now();
const sessionIDs = new Set();
const result = await io.fetchSockets();
result.forEach((socket) => {
if (socket.handshake.query.sessId) {
sessionIDs.add(socket.handshake.query.sessId);
}
})
await setNodeSessions(nodeID, Array.from(sessionIDs));
lastCacheUpdateTime = now;
const duration = performance.now() - startTime;
logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
} catch (error) {
logger.error(`Background refresh error: ${error}`);
}
}, cacheRefreshIntervalMs / 2);
}
module.exports = {
addSessionToCache,
renewSession,
getSessionFromCache,
removeSessionFromCache,
startCacheRefresher,
}

View file

@ -0,0 +1,21 @@
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
const {logger} = require('./logger');
let geoip = null;
if (process.env.MAXMINDDB_FILE !== undefined) {
geoip2Reader.open(process.env.MAXMINDDB_FILE, {})
.then(reader => {
geoip = reader;
})
.catch(error => {
logger.error(`Error while opening the MAXMINDDB_FILE, err: ${error}`);
});
} else {
logger.error("!!! please provide a valid value for MAXMINDDB_FILE env var.");
}
module.exports = {
geoip: () => {
return geoip;
}
}

View file

@ -0,0 +1,23 @@
const winston = require('winston');
const isDebugMode = process.env.debug === "1";
const logLevel = isDebugMode ? 'debug' : 'info';
const logger = winston.createLogger({
level: logLevel,
format: winston.format.combine(
winston.format.timestamp({
format: 'YYYY-MM-DD HH:mm:ss.SSS' // The same format as in backend services
}),
winston.format.errors({stack: true}),
winston.format.json()
),
defaultMeta: {service: process.env.SERVICE_NAME || 'assist'},
transports: [
new winston.transports.Console(),
],
});
module.exports = {
logger,
}

View file

@ -0,0 +1,254 @@
const {
processPeerInfo,
IDENTITIES,
EVENTS_DEFINITION,
extractSessionInfo,
errorHandler
} = require("./assist");
const {
addSessionToCache,
renewSession,
removeSessionFromCache
} = require('./cache');
const {
logger
} = require('./logger');
const deepMerge = require('@fastify/deepmerge')({all: true});
let io;
const setSocketIOServer = function (server) {
io = server;
}
function sendFrom(from, to, eventName, ...data) {
from.to(to).emit(eventName, ...data);
}
function sendTo(to, eventName, ...data) {
sendFrom(io, to, eventName, ...data);
}
const fetchSockets = async function (roomID) {
if (!io) {
return [];
}
try {
if (roomID) {
return await io.in(roomID).fetchSockets();
} else {
return await io.fetchSockets();
}
} catch (error) {
logger.error('Error fetching sockets:', error);
return [];
}
}
const findSessionSocketId = async (roomId, tabId) => {
let pickFirstSession = tabId === undefined;
const connected_sockets = await fetchSockets(roomId);
for (let socket of connected_sockets) {
if (socket.handshake.query.identity === IDENTITIES.session) {
if (pickFirstSession) {
return socket.id;
} else if (socket.handshake.query.tabId === tabId) {
return socket.id;
}
}
}
return null;
};
async function getRoomData(roomID) {
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
const connected_sockets = await fetchSockets(roomID);
if (connected_sockets.length > 0) {
for (let socket of connected_sockets) {
if (socket.handshake.query.identity === IDENTITIES.session) {
tabsCount++;
tabIDs.push(socket.handshake.query.tabId);
} else {
agentsCount++;
agentIDs.push(socket.id);
}
}
} else {
tabsCount = -1;
agentsCount = -1;
}
return {tabsCount, agentsCount, tabIDs, agentIDs};
}
async function onConnect(socket) {
logger.debug(`A new client:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
// Drop unknown socket.io connections
if (socket.handshake.query.identity === undefined || socket.handshake.query.peerId === undefined || socket.handshake.query.sessionInfo === undefined) {
logger.debug(`something is undefined, refusing connection`);
return socket.disconnect();
}
processPeerInfo(socket);
const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);
if (socket.handshake.query.identity === IDENTITIES.session) {
// Check if a session with the same tabID is already connected; if so, refuse the new connection
if (tabsCount > 0) {
for (let tab of tabIDs) {
if (tab === socket.handshake.query.tabId) {
logger.debug(`session already connected, refusing new connection, peerId: ${socket.handshake.query.peerId}`);
sendTo(socket.id, EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
return socket.disconnect();
}
}
}
extractSessionInfo(socket);
if (tabsCount < 0) {
// New session creates new room
}
// Inform all connected agents about reconnected session
if (agentsCount > 0) {
logger.debug(`notifying new session about agent-existence`);
sendTo(socket.id, EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
}
} else if (tabsCount <= 0) {
logger.debug(`notifying new agent about no SESSIONS with peerId:${socket.handshake.query.peerId}`);
sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
}
await socket.join(socket.handshake.query.roomId);
logger.debug(`${socket.id} joined room:${socket.handshake.query.roomId}, as:${socket.handshake.query.identity}, connections:${agentsCount + tabsCount + 1}`)
// Add session to cache
if (socket.handshake.query.identity === IDENTITIES.session) {
await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);
}
if (socket.handshake.query.identity === IDENTITIES.agent) {
if (socket.handshake.query.agentInfo !== undefined) {
socket.handshake.query.agentInfo = JSON.parse(socket.handshake.query.agentInfo);
socket.handshake.query.agentID = socket.handshake.query.agentInfo.id;
}
sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
}
socket.conn.on("packet", (packet) => {
if (packet.type === 'pong') {
renewSession(socket.handshake.query.sessId);
}
});
// Set disconnect handler
socket.on('disconnect', () => onDisconnect(socket));
// Handle update event
socket.on(EVENTS_DEFINITION.listen.UPDATE_EVENT, (...args) => onUpdateEvent(socket, ...args));
// Handle webrtc events
socket.on(EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, (...args) => onWebrtcAgentHandler(socket, ...args));
// Handle errors
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));
// Handle all other events (usually dom's mutations and user's actions)
socket.onAny((eventName, ...args) => onAny(socket, eventName, ...args));
}
async function onDisconnect(socket) {
logger.debug(`${socket.id} disconnected from ${socket.handshake.query.roomId}`);
if (socket.handshake.query.identity === IDENTITIES.agent) {
sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
}
logger.debug("checking for number of connected agents and sessions");
let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(socket.handshake.query.roomId);
if (tabsCount <= 0) {
await removeSessionFromCache(socket.handshake.query.sessId);
}
if (tabsCount === -1 && agentsCount === -1) {
logger.debug(`room not found: ${socket.handshake.query.roomId}`);
return;
}
if (tabsCount === 0) {
logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no SESSIONS`);
sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_SESSIONS);
}
if (agentsCount === 0) {
logger.debug(`notifying everyone in ${socket.handshake.query.roomId} about no AGENTS`);
sendFrom(socket, socket.handshake.query.roomId, EVENTS_DEFINITION.emit.NO_AGENTS);
}
}
async function onUpdateEvent(socket, ...args) {
logger.debug(`${socket.id} sent update event.`);
if (socket.handshake.query.identity !== IDENTITIES.session) {
logger.debug('Ignoring update event.');
return
}
args[0] = updateSessionData(socket, args[0])
socket.handshake.query.sessionInfo = deepMerge(socket.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
// update session cache
await addSessionToCache(socket.handshake.query.sessId, socket.handshake.query.sessionInfo);
// Update sessionInfo for all agents in the room
const connected_sockets = await fetchSockets(socket.handshake.query.roomId);
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
item.handshake.query.sessionInfo = deepMerge(item.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
} else if (item.handshake.query.identity === IDENTITIES.agent) {
sendFrom(socket, item.id, EVENTS_DEFINITION.emit.UPDATE_EVENT, args[0]);
}
}
}
async function onWebrtcAgentHandler(socket, ...args) {
if (socket.handshake.query.identity === IDENTITIES.agent) {
const agentIdToConnect = args[0]?.data?.toAgentId;
logger.debug(`${socket.id} sent webrtc event to agent:${agentIdToConnect}`);
if (agentIdToConnect && socket.handshake.sessionData.AGENTS_CONNECTED.includes(agentIdToConnect)) {
sendFrom(socket, agentIdToConnect, EVENTS_DEFINITION.listen.WEBRTC_AGENT_CALL, args[0]);
}
}
}
async function onAny(socket, eventName, ...args) {
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
logger.debug(`received event:${eventName}, should be handled by another listener, stopping onAny.`);
return
}
args[0] = updateSessionData(socket, args[0])
if (socket.handshake.query.identity === IDENTITIES.session) {
logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to room:${socket.handshake.query.roomId}`);
sendFrom(socket, socket.handshake.query.roomId, eventName, args[0]);
} else {
logger.debug(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to session of room:${socket.handshake.query.roomId}`);
let socketId = await findSessionSocketId(socket.handshake.query.roomId, args[0]?.meta?.tabId);
if (socketId === null) {
logger.debug(`session not found for:${socket.handshake.query.roomId}`);
sendTo(socket.id, EVENTS_DEFINITION.emit.NO_SESSIONS);
} else {
logger.debug("message sent");
sendTo(socket.id, eventName, socket.id, args[0]);
}
}
}
// Back compatibility (add top layer with meta information)
function updateSessionData(socket, sessionData) {
if (sessionData?.meta === undefined && socket.handshake.query.identity === IDENTITIES.session) {
sessionData = {meta: {tabId: socket.handshake.query.tabId, version: 1}, data: sessionData};
}
return sessionData
}
module.exports = {
onConnect,
setSocketIOServer,
}

1761
ee/assist-server/package-lock.json generated Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,24 @@
{
"name": "assist-server",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@fastify/deepmerge": "^3.0.0",
"@maxmind/geoip2-node": "^6.0.0",
"express": "^4.21.2",
"jsonwebtoken": "^9.0.2",
"redis": "^4.7.0",
"socket.io": "^4.8.1",
"socket.io-client": "^4.8.1",
"ua-parser-js": "^2.0.3",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.51.0",
"winston": "^3.17.0"
}
}

View file

@ -0,0 +1,67 @@
const { App } = require('uWebSockets.js');
const { Server } = require('socket.io');
const { logger } = require("./app/logger");
const { authorizer } = require("./app/assist");
const { onConnect, setSocketIOServer } = require("./app/socket");
const { startCacheRefresher } = require("./app/cache");
const app = App();
const pingInterval = parseInt(process.env.PING_INTERVAL) || 25000;
const getCompressionConfig = function () {
// WS: The theoretical overhead per socket is 19KB (11KB for compressor and 8KB for decompressor)
let perMessageDeflate = false;
if (process.env.COMPRESSION === "true") {
logger.info(`WS compression: enabled`);
perMessageDeflate = {
zlibDeflateOptions: {
windowBits: 10,
memLevel: 1
},
zlibInflateOptions: {
windowBits: 10
}
}
} else {
logger.info(`WS compression: disabled`);
}
return {
perMessageDeflate: perMessageDeflate,
clientNoContextTakeover: true
};
}
const io = new Server({
maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6,
pingInterval: pingInterval, // Will use it for cache invalidation
cors: {
origin: "*", // Allow connections from any origin (for development)
methods: ["GET", "POST"],
credentials: true
},
path: '/socket',
...getCompressionConfig()
});
io.use(async (socket, next) => await authorizer.check(socket, next));
io.on('connection', (socket) => onConnect(socket));
io.attachApp(app);
io.engine.on("headers", (headers) => {
headers["x-host-id"] = process.env.HOSTNAME || "unknown";
});
setSocketIOServer(io);
const HOST = process.env.LISTEN_HOST || '0.0.0.0';
const PORT = parseInt(process.env.PORT) || 9001;
app.listen(PORT, (token) => {
if (token) {
console.log(`Server running at http://${HOST}:${PORT}`);
} else {
console.log(`Failed to listen on port ${PORT}`);
}
});
startCacheRefresher(io);
process.on('uncaughtException', err => {
logger.error(`Uncaught Exception: ${err}`);
});

View file

@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {
// For the ender service only
func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
rows, err := s.db.Query(`
SELECT
session_id,
CASE
WHEN duration IS NULL OR duration < 0 THEN 0
ELSE duration
END,
start_ts
FROM sessions
WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
if err != nil {
return nil, err
}

View file

@ -0,0 +1,168 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
DROP TABLE IF EXISTS product_analytics.all_events;
CREATE TABLE IF NOT EXISTS product_analytics.all_events
(
project_id UInt16,
auto_captured BOOL DEFAULT FALSE,
event_name String,
display_name String DEFAULT '',
description String DEFAULT '',
event_count_l30days UInt32 DEFAULT 0,
query_count_l30days UInt32 DEFAULT 0,
created_at DateTime64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, auto_captured, event_name);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
TO product_analytics.all_events AS
SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
`$auto_captured` AS auto_captured,
`$event_name` AS event_name,
display_name,
description
FROM product_analytics.events
LEFT JOIN (SELECT project_id,
auto_captured,
event_name,
display_name,
description
FROM product_analytics.all_events
WHERE all_events.display_name != ''
OR all_events.description != '') AS old_data
ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
events.`$event_name` = old_data.event_name);
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
value_type String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name, value_type);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;
DROP TABLE IF EXISTS product_analytics.all_properties;
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
project_id UInt16,
property_name String,
is_event_property BOOL,
display_name String DEFAULT '',
description String DEFAULT '',
status String DEFAULT 'visible' COMMENT 'visible/hidden/dropped',
data_count UInt32 DEFAULT 1,
query_count UInt32 DEFAULT 0,
created_at DateTime64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
(
project_id UInt16,
property_name String,
is_event_property BOOL,
value String,
_timestamp DateTime DEFAULT now()
)
ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`$properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name
UNION ALL
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name;

View file

@@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
@@ -88,7 +88,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
TTL datetime + INTERVAL 1 MONTH;
@@ -140,7 +140,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 3 MONTH
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
@@ -189,7 +189,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type)
TTL _timestamp + INTERVAL 3 MONTH;
TTL _timestamp + INTERVAL 1 MONTH;
@@ -330,7 +330,7 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
TTL datetime + INTERVAL 1 MONTH;
SET allow_experimental_json_type = 1;
@@ -639,9 +639,11 @@ CREATE TABLE IF NOT EXISTS product_analytics.group_properties
-- The full list of events
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.all_events
(
project_id UInt16,
auto_captured BOOL DEFAULT FALSE,
event_name String,
display_name String DEFAULT '',
description String DEFAULT '',
@@ -651,10 +653,68 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
created_at DateTime64,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name);
ORDER BY (project_id, auto_captured, event_name);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill all_events using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_events_extractor_mv
TO product_analytics.all_events AS
SELECT DISTINCT ON (project_id,auto_captured,event_name) project_id,
`$auto_captured` AS auto_captured,
`$event_name` AS event_name,
display_name,
description
FROM product_analytics.events
LEFT JOIN (SELECT project_id,
auto_captured,
event_name,
display_name,
description
FROM product_analytics.all_events
WHERE all_events.display_name != ''
OR all_events.description != '') AS old_data
ON (events.project_id = old_data.project_id AND events.`$auto_captured` = old_data.auto_captured AND
events.`$event_name` = old_data.event_name);
-- -------- END ---------
-- The full list of event-properties (used to tell which property belongs to which event)
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.event_properties
(
project_id UInt16,
event_name String,
property_name String,
value_type String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, event_name, property_name, value_type);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill event_properties using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_properties_extractor_mv
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`$properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name;
-- Incremental materialized view to fill event_properties using properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.event_cproperties_extractor
TO product_analytics.event_properties AS
SELECT project_id,
`$event_name` AS event_name,
property_name,
JSONType(JSONExtractRaw(toString(`properties`), property_name)) AS value_type
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name;
-- -------- END ---------
-- The full list of properties (events and users)
-- Experimental: This table is filled by an incremental materialized view
CREATE TABLE IF NOT EXISTS product_analytics.all_properties
(
project_id UInt16,
@@ -670,3 +730,95 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_properties
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
-- ----------------- This is experimental, if it doesn't work, we need to do it in db worker -------------
-- Incremental materialized view to fill all_properties using $properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_properties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
-- Incremental materialized view to fill all_properties using properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.all_cproperties_extractor_mv
TO product_analytics.all_properties AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
LEFT JOIN (SELECT project_id,
property_name,
display_name,
description,
status,
data_count,
query_count
FROM product_analytics.all_properties
WHERE (all_properties.display_name != ''
OR all_properties.description != '')
AND is_event_property) AS old_data
ON (events.project_id = old_data.project_id AND property_name = old_data.property_name);
-- -------- END ---------
-- Some random examples of property-values, limited by 2 per property
-- Experimental: This table is filled by a refreshable materialized view
CREATE TABLE IF NOT EXISTS product_analytics.property_values_samples
(
project_id UInt16,
property_name String,
is_event_property BOOL,
value String,
_timestamp DateTime DEFAULT now()
)
ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, property_name, is_event_property);
-- Refreshable materialized view to get random examples of property values using $properties & properties
CREATE MATERIALIZED VIEW IF NOT EXISTS product_analytics.property_values_sampler_mv
REFRESH EVERY 30 HOUR TO product_analytics.property_values_samples AS
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`$properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`$properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name
UNION ALL
-- using union because each table should be the target of 1 single refreshable MV
SELECT project_id,
property_name,
TRUE AS is_event_property,
JSONExtractString(toString(`properties`), property_name) AS value
FROM product_analytics.events
ARRAY JOIN JSONExtractKeys(toString(`properties`)) as property_name
WHERE randCanonical() < 0.5 -- This randomly skips inserts
AND value != ''
LIMIT 2 BY project_id,property_name;

View file

@@ -0,0 +1,30 @@
\set previous_version 'v1.22.0-ee'
\set next_version 'v1.23.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

View file

@@ -1,4 +1,4 @@
\set or_version 'v1.22.0-ee'
\set or_version 'v1.23.0-ee'
SET client_min_messages TO NOTICE;
\set ON_ERROR_STOP true
SELECT EXISTS (SELECT 1

View file

@@ -0,0 +1,3 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
DROP TABLE IF EXISTS product_analytics.event_properties;

View file

@@ -0,0 +1,27 @@
\set previous_version 'v1.23.0-ee'
\set next_version 'v1.22.0-ee'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

View file

@@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
MINIO_SECRET_KEY = ''
# APP and TRACKER VERSIONS
VERSION = 1.22.0
TRACKER_VERSION = '16.0.1'
VERSION = 1.23.0
TRACKER_VERSION = '17.0.0'

View file

@@ -16,10 +16,10 @@ function ProfilerDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
const fn = profiler('call_name')(() => {
//...
}, thisArg); // thisArg is optional`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerProfiler from '@openreplay/tracker-profiler/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...
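Because added and removed lines are interleaved in this rendering, the resulting ESM snippet shown in the Profiler doc reads roughly as follows after the change (PROJECT_KEY stands in for the interpolated project key, and //... marks elided lines). The same migration, from new OpenReplay({ ... }) to the shared tracker.configure({ ... }) instance, is applied in the plugin docs below (Assist, GraphQL, MobX, NgRx, Pinia, Redux, Vuex, Zustand) and in installDocs.
// Reconstructed post-change usage sketch; PROJECT_KEY is a placeholder.
import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';
//...
tracker.configure({
    projectKey: 'PROJECT_KEY'
});
tracker.start()
//...
export const profiler = tracker.use(trackerProfiler());

const fn = profiler('call_name')(() => {
    //...
}, thisArg); // thisArg is optional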

View file

@@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';
function AssistNpm(props) {
const { t } = useTranslation();
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerAssist from '@openreplay/tracker-assist';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}',
});
tracker.start()
tracker.use(trackerAssist(options)); // check the list of available options below`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerAssist from '@openreplay/tracker-assist/cjs';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}'
});
const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below

View file

@@ -14,19 +14,20 @@ function GraphQLDoc() {
const projectKey = siteId
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerGraphQL from '@openreplay/tracker-graphql';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
//...
export const recordGraphQL = tracker.use(trackerGraphQL());`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@@ -15,20 +15,21 @@ function MobxDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const mobxUsage = `import OpenReplay from '@openreplay/tracker';
const mobxUsage = `import { tracker } from '@openreplay/tracker';
import trackerMobX from '@openreplay/tracker-mobx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below
tracker.start();
`;
const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerMobX from '@openreplay/tracker-mobx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below

View file

@@ -16,10 +16,10 @@ function NgRxDoc() {
: sites[0]?.projectKey;
const usage = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerNgRx from '@openreplay/tracker-ngrx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
export class AppModule {}`;
const usageCjs = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@@ -17,10 +17,10 @@ function PiniaDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

View file

@@ -16,10 +16,10 @@ function ReduxDoc() {
: sites[0]?.projectKey;
const usage = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerRedux from '@openreplay/tracker-redux';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@@ -29,10 +29,11 @@ const store = createStore(
applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
);`;
const usageCjs = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerRedux from '@openreplay/tracker-redux/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@@ -16,10 +16,10 @@ function VueDoc() {
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@@ -29,10 +29,11 @@ const store = new Vuex.Store({
plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
});`;
const usageCjs = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerVuex from '@openreplay/tracker-vuex/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@@ -16,11 +16,10 @@ function ZustandDoc(props) {
: sites[0]?.projectKey;
const usage = `import create from "zustand";
import Tracker from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});
@@ -43,11 +42,12 @@ const useBearStore = create(
)
`;
const usageCjs = `import create from "zustand";
import Tracker from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});

View file

@@ -3,6 +3,7 @@ import withPageTitle from 'HOCs/withPageTitle';
import { PageTitle } from 'UI';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import LanguageSwitcher from "App/components/LanguageSwitcher";
import Settings from './Settings';
import ChangePassword from './ChangePassword';
import styles from './profileSettings.module.css';
@@ -20,107 +21,90 @@ function ProfileSettings() {
return (
<div className="bg-white rounded-lg border shadow-sm p-5">
<PageTitle title={<div>{t('Account')}</div>} />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Profile')}</h4>
<div className={styles.info}>
{t(
'Your email address is your identity on OpenReplay and is used to login.',
)}
</div>
</div>
<div>
<Settings />
</div>
</div>
<Section
title={t('Profile')}
description={t('Your email address is your identity on OpenReplay and is used to login.')}
children={<Settings />}
/>
<div className="border-b my-10" />
{account.hasPassword && (
<>
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Change Password')}</h4>
<div className={styles.info}>
{t('Updating your password from time to time enhances your accounts security.')}
</div>
</div>
<div>
<ChangePassword />
</div>
</div>
<Section
title={t('Change Password')}
description={t('Updating your password from time to time enhances your account’s security.')}
children={<ChangePassword />}
/>
<div className="border-b my-10" />
</>
)}
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Organization API Key')}</h4>
<div className={styles.info}>
{t('Your API key gives you access to an extra set of services.')}
</div>
</div>
<div>
<Api />
</div>
</div>
<Section
title={t('Interface Language')}
description={t('Select the language in which OpenReplay will appear.')}
children={<LanguageSwitcher />}
/>
<Section
title={t('Organization API Key')}
description={t('Your API key gives you access to an extra set of services.')}
children={<Api />}
/>
{isEnterprise && (account.admin || account.superAdmin) && (
<>
<div className="border-b my-10" />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Tenant Key')}</h4>
<div className={styles.info}>
{t('For SSO (SAML) authentication.')}
</div>
</div>
<div>
<TenantKey />
</div>
</div>
<Section
title={t('Tenant Key')}
description={t('For SSO (SAML) authentication.')}
children={<TenantKey />}
/>
</>
)}
{!isEnterprise && (
<>
<div className="border-b my-10" />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('Data Collection')}</h4>
<div className={styles.info}>
{t('Enables you to control how OpenReplay captures data on your organizations usage to improve our product.')}
</div>
</div>
<div>
<OptOut />
</div>
</div>
<Section
title={t('Data Collection')}
description={t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
children={<OptOut />}
/>
</>
)}
{account.license && (
<>
<div className="border-b my-10" />
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{t('License')}</h4>
<div className={styles.info}>
{t('License key and expiration date.')}
</div>
</div>
<div>
<Licenses />
</div>
</div>
<Section title={t('License')} description={t('License key and expiration date.')} children={<Licenses />} />
</>
)}
</div>
);
}
function Section({ title, description, children }: {
title: string;
description: string;
children: React.ReactNode;
}) {
return (
<div className="flex items-center">
<div className={styles.left}>
<h4 className="text-lg mb-4">{title}</h4>
<div className={styles.info}>
{description}
</div>
</div>
<div>
{children}
</div>
</div>
)
}
export default withPageTitle('Account - OpenReplay Preferences')(
observer(ProfileSettings),
);

View file

@@ -1,32 +0,0 @@
import React from 'react';
import cn from 'classnames';
import { Styles } from '../../common';
import stl from './scale.module.css';
import { useTranslation } from 'react-i18next';
function Scale({ colors }) {
const { t } = useTranslation();
const lastIndex = Styles.compareColors.length - 1;
return (
<div className={cn(stl.bars, 'absolute bottom-0 mb-4')}>
{Styles.compareColors.map((c, i) => (
<div
key={i}
style={{
backgroundColor: c,
width: '6px',
height: '15px',
marginBottom: '1px',
}}
className="flex items-center justify-center"
>
{i === 0 && <div className="text-xs pl-12">{t('Slow')}</div>}
{i === lastIndex && <div className="text-xs pl-12">{t('Fast')}</div>}
</div>
))}
</div>
);
}
export default Scale;

View file

@@ -1,55 +0,0 @@
.maps {
height: auto;
width: 110%;
stroke: $gray-medium;
stroke-width: 1;
stroke-linecap: round;
stroke-linejoin: round;
margin-top: -20px;
}
.location {
fill: $gray-light !important;
cursor: pointer;
stroke: #fff;
&:focus,
&:hover {
fill: #2E3ECC !important;
outline: 0;
}
}
.heat_index0 {
fill:$gray-light !important;
}
.heat_index5 {
fill: #B0B8FF !important;
}
.heat_index4 {
fill:#6171FF !important;
}
.heat_index3 {
fill: #394EFF !important;
}
.heat_index2 {
fill: #2E3ECC !important;
}
.heat_index1 {
fill: #222F99 !important;
}
.tooltip {
position: fixed;
padding: 5px;
border: 1px solid $gray-light;
border-radius: 3px;
background-color: white;
font-size: 12px;
line-height: 1.2;
}

View file

@@ -1,134 +0,0 @@
import React from 'react';
import { NoContent } from 'UI';
import { observer } from 'mobx-react-lite';
import { numberWithCommas, positionOfTheNumber } from 'App/utils';
import WorldMap from '@svg-maps/world';
import { SVGMap } from 'react-svg-map';
import cn from 'classnames';
import { NO_METRIC_DATA } from 'App/constants/messages';
import { InfoCircleOutlined } from '@ant-design/icons';
import stl from './SpeedIndexByLocation.module.css';
import Scale from './Scale';
import { Styles, AvgLabel } from '../../common';
import { useTranslation } from 'react-i18next';
interface Props {
data?: any;
}
function SpeedIndexByLocation(props: Props) {
const { t } = useTranslation();
const { data } = props;
const wrapper: any = React.useRef(null);
const [tooltipStyle, setTooltipStyle] = React.useState({ display: 'none' });
const [pointedLocation, setPointedLocation] = React.useState<any>(null);
const dataMap: any = React.useMemo(() => {
const _data: any = {};
const max = data.chart?.reduce(
(acc: any, item: any) => Math.max(acc, item.value),
0,
);
const min = data.chart?.reduce(
(acc: any, item: any) => Math.min(acc, item.value),
0,
);
data.chart?.forEach((item: any) => {
if (!item || !item.userCountry) {
return;
}
item.perNumber = positionOfTheNumber(min, max, item.value, 5);
_data[item.userCountry.toLowerCase()] = item;
});
return _data;
}, [data.chart]);
const getLocationClassName = (location: any) => {
const i = dataMap[location.id] ? dataMap[location.id].perNumber : 0;
const cls = stl[`heat_index${i}`];
return cn(stl.location, cls);
};
const getLocationName = (event: any) => {
if (!event) return null;
const id = event.target.attributes.id.value;
const name = event.target.attributes.name.value;
const percentage = dataMap[id] ? dataMap[id].perNumber : 0;
return { name, id, percentage };
};
const handleLocationMouseOver = (event: any) => {
const pointedLocation = getLocationName(event);
setPointedLocation(pointedLocation);
};
const handleLocationMouseOut = () => {
setTooltipStyle({ display: 'none' });
setPointedLocation(null);
};
const handleLocationMouseMove = (event: any) => {
const tooltipStyle = {
display: 'block',
top: event.clientY + 10,
left: event.clientX - 100,
};
setTooltipStyle(tooltipStyle);
};
return (
<NoContent
size="small"
show={false}
style={{ height: '240px' }}
title={
<div className="flex items-center gap-2 text-base font-normal">
<InfoCircleOutlined size={12} /> {NO_METRIC_DATA}
</div>
}
>
<div className="absolute right-0 mr-4 top=0 w-full flex justify-end">
<AvgLabel text="Avg" count={Math.round(data.value)} unit="ms" />
</div>
<Scale colors={Styles.compareColors} />
<div className="map-target" />
<div
style={{
height: '234px',
width: '100%',
margin: '0 auto',
display: 'flex',
}}
ref={wrapper}
>
<SVGMap
map={WorldMap}
className={stl.maps}
locationClassName={getLocationClassName}
onLocationMouseOver={handleLocationMouseOver}
onLocationMouseOut={handleLocationMouseOut}
onLocationMouseMove={handleLocationMouseMove}
/>
</div>
<div className={stl.tooltip} style={tooltipStyle}>
{pointedLocation && (
<>
<div>{pointedLocation.name}</div>
<div>
{t('Avg:')}{' '}
<strong>
{dataMap[pointedLocation.id]
? numberWithCommas(
parseInt(dataMap[pointedLocation.id].value),
)
: 0}
</strong>
</div>
</>
)}
</div>
</NoContent>
);
}
export default observer(SpeedIndexByLocation);

View file

@@ -1 +0,0 @@
export { default } from './SpeedIndexByLocation';

View file

@@ -1,11 +0,0 @@
.bars {
& div:first-child {
border-top-left-radius: 3px;
border-top-right-radius: 3px;
}
& div:last-child {
border-bottom-left-radius: 3px;
border-bottom-right-radius: 3px;
}
}

View file

@@ -1,92 +0,0 @@
import React from 'react';
import ExCard from 'Components/Dashboard/components/DashboardList/NewDashModal/Examples/ExCard';
import InsightsCard from 'Components/Dashboard/Widgets/CustomMetricsWidgets/InsightsCard';
import { InsightIssue } from 'App/mstore/types/widget';
import SessionsPerBrowser from 'Components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import SpeedIndexByLocation from 'Components/Dashboard/Widgets/PredefinedWidgets/SpeedIndexByLocation';
interface Props {
title: string;
type: string;
onCard: (card: string) => void;
}
function SpeedIndexByLocationExample(props: Props) {
const data = {
value: 1480,
chart: [
{
userCountry: 'AT',
value: 415,
},
{
userCountry: 'PL',
value: 433.1666666666667,
},
{
userCountry: 'FR',
value: 502,
},
{
userCountry: 'IT',
value: 540.4117647058823,
},
{
userCountry: 'TH',
value: 662.0,
},
{
userCountry: 'ES',
value: 740.5454545454545,
},
{
userCountry: 'SG',
value: 889.6666666666666,
},
{
userCountry: 'TW',
value: 1008.0,
},
{
userCountry: 'HU',
value: 1027.0,
},
{
userCountry: 'DE',
value: 1054.4583333333333,
},
{
userCountry: 'BE',
value: 1126.0,
},
{
userCountry: 'TR',
value: 1174.0,
},
{
userCountry: 'US',
value: 1273.3015873015872,
},
{
userCountry: 'GB',
value: 1353.8095238095239,
},
{
userCountry: 'VN',
value: 1473.8181818181818,
},
{
userCountry: 'HK',
value: 1654.6666666666667,
},
],
unit: 'ms',
};
return (
<ExCard {...props}>
<SpeedIndexByLocation data={data} />
</ExCard>
);
}
export default SpeedIndexByLocationExample;

View file

@@ -200,7 +200,6 @@ function WidgetChart(props: Props) {
const payload = {
...params,
..._metric.toJson(),
viewType: 'lineChart',
};
fetchMetricChartData(
_metric,

View file

@@ -66,8 +66,23 @@ export default observer(WidgetFormNew);
const FilterSection = observer(
({ layout, metric, excludeFilterKeys, excludeCategory }: any) => {
const isTable = metric.metricType === TABLE;
const isHeatMap = metric.metricType === HEATMAP;
const isFunnel = metric.metricType === FUNNEL;
const isInsights = metric.metricType === INSIGHTS;
const isPathAnalysis = metric.metricType === USER_PATH;
const isRetention = metric.metricType === RETENTION;
const canAddSeries = metric.series.length < 3;
const isSingleSeries =
isTable ||
isFunnel ||
isHeatMap ||
isInsights ||
isRetention ||
isPathAnalysis;
const { t } = useTranslation();
const allOpen = layout.startsWith('flex-row');
const allOpen = isSingleSeries || layout.startsWith('flex-row');
const defaultClosed = React.useRef(!allOpen && metric.exists());
const [seriesCollapseState, setSeriesCollapseState] = React.useState<
Record<number, boolean>
@@ -84,21 +99,6 @@ const FilterSection = observer(
});
setSeriesCollapseState(defaultSeriesCollapseState);
}, [metric.series]);
const isTable = metric.metricType === TABLE;
const isHeatMap = metric.metricType === HEATMAP;
const isFunnel = metric.metricType === FUNNEL;
const isInsights = metric.metricType === INSIGHTS;
const isPathAnalysis = metric.metricType === USER_PATH;
const isRetention = metric.metricType === RETENTION;
const canAddSeries = metric.series.length < 3;
const isSingleSeries =
isTable ||
isFunnel ||
isHeatMap ||
isInsights ||
isRetention ||
isPathAnalysis;
const collapseAll = () => {
setSeriesCollapseState((seriesCollapseState) => {

View file

@@ -18,7 +18,6 @@ import SessionsImpactedBySlowRequests from 'App/components/Dashboard/Widgets/Pre
import SessionsPerBrowser from 'App/components/Dashboard/Widgets/PredefinedWidgets/SessionsPerBrowser';
import { FilterKey } from 'Types/filter/filterType';
import CallWithErrors from '../../Widgets/PredefinedWidgets/CallWithErrors';
import SpeedIndexByLocation from '../../Widgets/PredefinedWidgets/SpeedIndexByLocation';
import ResponseTimeDistribution from '../../Widgets/PredefinedWidgets/ResponseTimeDistribution';
import { useTranslation } from 'react-i18next';
@@ -49,8 +48,6 @@ function WidgetPredefinedChart(props: Props) {
return <CallsErrors5xx data={data} metric={metric} />;
case FilterKey.CALLS_ERRORS:
return <CallWithErrors isTemplate={isTemplate} data={data} />;
case FilterKey.SPEED_LOCATION:
return <SpeedIndexByLocation data={data} />;
default:
return (
<div className="h-40 color-red">{t('Widget not supported')}</div>

View file

@@ -83,6 +83,7 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
});
const onChartClick = () => {
dashboardStore.setDrillDownPeriod(dashboardStore.period);
// if (!isWidget || isPredefined) return;
props.history.push(
withSiteId(

View file

@@ -1,9 +1,7 @@
import { Button, Dropdown, MenuProps, Space, Typography } from 'antd';
import React, { useCallback, useState } from 'react';
import { Button, Dropdown, MenuProps, Typography } from 'antd';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { CaretDownOutlined } from '@ant-design/icons';
import { Languages } from 'lucide-react';
import { Icon } from '../ui';
import { ChevronDown } from 'lucide-react';
const langs = [
{ code: 'en', label: 'English' },
@@ -12,14 +10,25 @@ const langs = [
{ code: 'ru', label: 'Русский' },
{ code: 'zh', label: '中國人' },
];
const langLabels: Record<string, string> = {
en: 'English',
fr: 'Français',
es: 'Español',
ru: 'Русский',
zh: '中國人',
}
function LanguageSwitcher() {
const { i18n } = useTranslation();
const [selected, setSelected] = React.useState(i18n.language);
const handleChangeLanguage = useCallback((lang: string) => {
i18n.changeLanguage(lang);
localStorage.setItem('i18nextLng', lang);
}, []);
const onChange = (val: string) => {
setSelected(val)
}
const handleChangeLanguage = () => {
void i18n.changeLanguage(selected)
localStorage.setItem('i18nextLng', selected)
}
const menuItems: MenuProps['items'] = langs.map((lang) => ({
key: lang.code,
@@ -31,21 +40,31 @@ function LanguageSwitcher() {
}));
return (
<Dropdown
menu={{
items: menuItems,
selectable: true,
defaultSelectedKeys: [i18n.language],
style: {
maxHeight: 500,
overflowY: 'auto',
},
onClick: (e) => handleChangeLanguage(e.key),
}}
placement="bottomLeft"
>
<Button icon={<Languages size={12} />} />
</Dropdown>
<div className={'flex flex-col gap-2 align-start'}>
<div className={'font-semibold'}>{i18n.t('Language')}</div>
<Dropdown
menu={{
items: menuItems,
selectable: true,
defaultSelectedKeys: [i18n.language],
style: {
maxHeight: 500,
overflowY: 'auto',
},
onClick: (e) => onChange(e.key),
}}
>
<Button>
<div className={'flex justify-between items-center gap-8'}>
<span>{langLabels[selected] ?? selected}</span>
<ChevronDown size={14} />
</div>
</Button>
</Dropdown>
<Button className={'w-fit'} onClick={handleChangeLanguage}>
{i18n.t('Update')}
</Button>
</div>
);
}
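For context, a minimal sketch of how the language persisted here could be restored at startup, assuming a standard i18next/react-i18next bootstrap; the project's actual i18n initialization is not part of this diff.
// Hypothetical i18n bootstrap — reads the language saved by LanguageSwitcher from localStorage.
import i18n from 'i18next';
import { initReactI18next } from 'react-i18next';

const savedLng = localStorage.getItem('i18nextLng') || 'en';

i18n.use(initReactI18next).init({
    lng: savedLng,     // language selected via LanguageSwitcher, defaulting to English
    fallbackLng: 'en',
    resources: {},     // translation bundles omitted in this sketch
});

export default i18n;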

View file

@@ -66,7 +66,8 @@ function Login({
if (event.data.type === 'orspot:logged') {
clearInterval(int);
window.removeEventListener('message', onSpotMsg);
toast.success(t('You have been logged into Spot successfully'));
const msg = t('You have been logged into Spot successfully')
toast.success(msg);
}
};
window.addEventListener('message', onSpotMsg);

View file

@@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';
const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});
tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});

View file

@@ -1,5 +1,4 @@
import React from 'react';
import { findDOMNode } from 'react-dom';
import cn from 'classnames';
import Overlay from 'Components/Session_/Player/Overlay';
import stl from 'Components/Session_/Player/player.module.css';
@@ -10,9 +9,7 @@ function Player() {
const playerContext = React.useContext(PlayerContext);
const screenWrapper = React.useRef<HTMLDivElement>(null);
React.useEffect(() => {
const parentElement = findDOMNode(
screenWrapper.current,
) as HTMLDivElement | null;
const parentElement = screenWrapper.current
if (parentElement) {
playerContext.player.attach(parentElement);
}

View file

@@ -48,7 +48,7 @@ function ClipPlayerControls({
<Timeline range={range} />
<Button size="small" type="primary" onClick={showFullSession}>
{t('Play Full Session')}
<CirclePlay size={16} />
<CirclePlay size={16} style={{ marginLeft: '0px'}} />
</Button>
</div>
);

View file

@@ -1,7 +1,6 @@
import cn from 'classnames';
import { observer } from 'mobx-react-lite';
import React from 'react';
import { findDOMNode } from 'react-dom';
import {
ILivePlayerContext,
@@ -40,9 +39,7 @@ function Player({ fullView, isMultiview }: IProps) {
React.useEffect(() => {
if (!closedLive || isMultiview) {
const parentElement = findDOMNode(
screenWrapper.current,
) as HTMLDivElement | null; // TODO: good architecture
const parentElement = screenWrapper.current // TODO: good architecture
if (parentElement) {
playerContext.player.attach(parentElement);
playerContext.player.play();

Some files were not shown because too many files have changed in this diff.