Compare commits
57 commits
main...update_bat
| SHA1 |
|---|
| 7894d10509 |
| 9c6f587b98 |
| 300a857a5c |
| eba22e0efa |
| 664f6b9014 |
| 5bbd7cff10 |
| 6f172d4f01 |
| 829e1c8bde |
| e7d309dadf |
| 4bac12308a |
| 2aba1d9a52 |
| 1f4e32e4f2 |
| 49f98967d6 |
| 356fa02094 |
| a8e47e59ad |
| 22ee13e641 |
| c760d29fb4 |
| d77a518cf0 |
| f595a5932a |
| e04c2aa251 |
| e6eb41536d |
| 4b3ad60565 |
| 90669b0604 |
| f4bf1b8960 |
| 70423c6d8e |
| ae313c17d4 |
| 0e45fa53ad |
| de2f87270a |
| fe20f83130 |
| d04e6686ca |
| 6adb45e15f |
| a1337faeee |
| 7e065ab02f |
| 1e2dde09b4 |
| 3cdfe76134 |
| 39855651d5 |
| dd469d2349 |
| 3d448320bf |
| 7b0771a581 |
| 988b396223 |
| fa3b585785 |
| 91e0ebeb56 |
| 8e68eb9a20 |
| 13bd3d9121 |
| 048ae0913c |
| 73fff8b817 |
| 605fa96a34 |
| 2cb33d7894 |
| 15d427418d |
| ed3e553726 |
| 7eace68de6 |
| 8009882cef |
| 7365d8639c |
| 4c967d4bc1 |
| 3fdf799bd7 |
| 9aca716e6b |
| cf9ecdc9a4 |
258 changed files with 3052 additions and 4843 deletions
122 changes: .github/workflows/assist-server-ee.yaml (vendored, deleted)

```diff
@@ -1,122 +0,0 @@
-# This action will push the assist changes to aws
-on:
-  workflow_dispatch:
-    inputs:
-      skip_security_checks:
-        description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
-        required: false
-        default: "false"
-  push:
-    branches:
-      - dev
-    paths:
-      - "ee/assist-server/**"
-
-name: Build and Deploy Assist-Server EE
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # We need to diff with old commit
-          # to see which workers got changed.
-          fetch-depth: 2
-
-      - uses: ./.github/composite-actions/update-keys
-        with:
-          assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
-          assist_key: ${{ secrets.ASSIST_KEY }}
-          domain_name: ${{ secrets.EE_DOMAIN_NAME }}
-          jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
-          jwt_secret: ${{ secrets.EE_JWT_SECRET }}
-          jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
-          jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
-          license_key: ${{ secrets.EE_LICENSE_KEY }}
-          minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
-          minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
-          pg_password: ${{ secrets.EE_PG_PASSWORD }}
-          registry_url: ${{ secrets.OSS_REGISTRY_URL }}
-        name: Update Keys
-
-      - name: Docker login
-        run: |
-          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
-
-      - uses: azure/k8s-set-context@v1
-        with:
-          method: kubeconfig
-          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
-        id: setcontext
-
-      - name: Building and Pushing Assist-Server image
-        id: build-image
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
-          ENVIRONMENT: staging
-        run: |
-          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
-          cd assist-server
-          PUSH_IMAGE=0 bash -x ./build.sh ee
-          [[ "x$skip_security_checks" == "xtrue" ]] || {
-            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
-            images=("assist-server")
-            for image in ${images[*]};do
-              ./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
-            done
-            err_code=$?
-            [[ $err_code -ne 0 ]] && {
-              exit $err_code
-            }
-          } && {
-            echo "Skipping Security Checks"
-          }
-          images=("assist-server")
-          for image in ${images[*]};do
-            docker push $DOCKER_REPO/$image:$IMAGE_TAG
-          done
-      - name: Creating old image input
-        run: |
-          #
-          # Create yaml with existing image tags
-          #
-          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
-          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
-
-          echo > /tmp/image_override.yaml
-
-          for line in `cat /tmp/image_tag.txt`;
-          do
-            image_array=($(echo "$line" | tr ':' '\n'))
-            cat <<EOF >> /tmp/image_override.yaml
-          ${image_array[0]}:
-            image:
-              # We've to strip off the -ee, as helm will append it.
-              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
-          EOF
-          done
-      - name: Deploy to kubernetes
-        run: |
-          pwd
-          cd scripts/helmcharts/
-
-          # Update changed image tag
-          sed -i "/assist-server/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
-
-          cat /tmp/image_override.yaml
-          # Deploy command
-          mkdir -p /tmp/charts
-          mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
-          rm -rf openreplay/charts/*
-          mv /tmp/charts/* openreplay/charts/
-          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
-        env:
-          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
-          # We're not passing -ee flag, because helm will add that.
-          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
-          ENVIRONMENT: staging
```
189 changes: .github/workflows/patch-build-old.yaml (vendored, deleted)

```diff
@@ -1,189 +0,0 @@
-# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
-
-on:
-  workflow_dispatch:
-    inputs:
-      services:
-        description: 'Comma separated names of services to build(in small letters).'
-        required: true
-        default: 'chalice,frontend'
-      tag:
-        description: 'Tag to update.'
-        required: true
-        type: string
-      branch:
-        description: 'Branch to build patches from. Make sure the branch is uptodate with tag. Else itll cause missing commits.'
-        required: true
-        type: string
-
-name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
-
-jobs:
-  deploy:
-    name: Build Patch from old tag
-    runs-on: ubuntu-latest
-    env:
-      DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
-      DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          fetch-depth: 4
-          ref: ${{ github.event.inputs.tag }}
-
-      - name: Set Remote with GITHUB_TOKEN
-        run: |
-          git config --unset http.https://github.com/.extraheader
-          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
-
-      - name: Create backup tag with timestamp
-        run: |
-          set -e # Exit immediately if a command exits with a non-zero status
-          TIMESTAMP=$(date +%Y%m%d%H%M%S)
-          BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
-          echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
-          echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
-          git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
-          git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
-          echo "Created backup tag: $BACKUP_TAG"
-
-          # Get the oldest commit date from the last 3 commits in raw format
-          OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
-          echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
-          # Add 1 second to the timestamp
-          NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
-          echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
-
-
-      - name: Setup yq
-        uses: mikefarah/yq@master
-
-      # Configure AWS credentials for the first registry
-      - name: Configure AWS credentials for RELEASE_ARM_REGISTRY
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
-          aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
-          aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
-
-      - name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
-        id: login-ecr-arm
-        run: |
-          aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
-          aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
-
-      - uses: depot/setup-action@v1
-      - name: Get HEAD Commit ID
-        run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
-      - name: Define Branch Name
-        run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
-
-      - name: Build
-        id: build-image
-        env:
-          DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
-          DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
-          MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
-          MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
-          MSAAS_REPO_FOLDER: /tmp/msaas
-        run: |
-          set -exo pipefail
-          git config --local user.email "action@github.com"
-          git config --local user.name "GitHub Action"
-          git checkout -b $BRANCH_NAME
-          working_dir=$(pwd)
-          function image_version(){
-            local service=$1
-            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
-            current_version=$(yq eval '.AppVersion' $chart_path)
-            new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
-            echo $new_version
-            # yq eval ".AppVersion = \"$new_version\"" -i $chart_path
-          }
-          function clone_msaas() {
-            [ -d $MSAAS_REPO_FOLDER ] || {
-              git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
-              cd $MSAAS_REPO_FOLDER
-              cd openreplay && git fetch origin && git checkout $INPUT_TAG
-              git log -1
-              cd $MSAAS_REPO_FOLDER
-              bash git-init.sh
-              git checkout
-            }
-          }
-          function build_managed() {
-            local service=$1
-            local version=$2
-            echo building managed
-            clone_msaas
-            if [[ $service == 'chalice' ]]; then
-              cd $MSAAS_REPO_FOLDER/openreplay/api
-            else
-              cd $MSAAS_REPO_FOLDER/openreplay/$service
-            fi
-            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
-          }
-          # Checking for backend images
-          ls backend/cmd >> /tmp/backend.txt
-          echo Services: "${{ github.event.inputs.services }}"
-          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
-          BUILD_SCRIPT_NAME="build.sh"
-          # Build FOSS
-          for SERVICE in "${SERVICES[@]}"; do
-            # Check if service is backend
-            if grep -q $SERVICE /tmp/backend.txt; then
-              cd backend
-              foss_build_args="nil $SERVICE"
-              ee_build_args="ee $SERVICE"
-            else
-              [[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
-              [[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
-              ee_build_args="ee"
-            fi
-            version=$(image_version $SERVICE)
-            echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-            echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
-            IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
-            if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
-              IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-              echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
-            else
-              build_managed $SERVICE $version
-            fi
-            cd $working_dir
-            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
-            yq eval ".AppVersion = \"$version\"" -i $chart_path
-            git add $chart_path
-            git commit -m "Increment $SERVICE chart version"
-          done
-
-      - name: Change commit timestamp
-        run: |
-          # Convert the timestamp to a date format git can understand
-          NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
-          echo "Setting commit date to: $NEW_DATE"
-
-          # Amend the commit with the new date
-          GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
-
-          # Verify the change
-          git log -1 --pretty=format:"Commit now dated: %cD"
-
-          # git tag and push
-          git tag $INPUT_TAG -f
-          git push origin $INPUT_TAG -f
-
-
-      # - name: Debug Job
-      #   if: ${{ failure() }}
-      #   uses: mxschmitt/action-tmate@v3
-      #   env:
-      #     DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
-      #     DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
-      #     MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
-      #     MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
-      #     MSAAS_REPO_FOLDER: /tmp/msaas
-      #   with:
-      #     limit-access-to-actor: true
```
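The `image_version` helper above derives the next image tag by incrementing only the last component of the chart's `AppVersion` with an awk one-liner. A minimal Python sketch of the same increment for three-component versions (the function name is ours, not the repo's):

```python
def bump_patch(version: str) -> str:
    """Increment the last dot-separated component: '1.21.9' -> '1.21.10'.

    Mirrors: awk -F. '{$NF += 1 ; print $1"."$2"."$3}'
    """
    parts = version.split(".")
    parts[-1] = str(int(parts[-1]) + 1)
    return ".".join(parts)


assert bump_patch("1.21.9") == "1.21.10"
```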
246 changes: .github/workflows/patch-build.yaml (vendored)

```diff
@@ -2,6 +2,7 @@
 on:
   workflow_dispatch:
+    description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
     inputs:
       services:
         description: 'Comma separated names of services to build(in small letters).'
@@ -19,20 +20,12 @@ jobs:
       DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
-          fetch-depth: 0
-          token: ${{ secrets.GITHUB_TOKEN }}
+          fetch-depth: 1
       - name: Rebase with main branch, to make sure the code has latest main changes
-        if: github.ref != 'refs/heads/main'
         run: |
-          git remote -v
-          git config --global user.email "action@github.com"
-          git config --global user.name "GitHub Action"
-          git config --global rebase.autoStash true
-          git fetch origin main:main
-          git rebase main
-          git log -3
+          git pull --rebase origin main

       - name: Downloading yq
         run: |
@@ -55,8 +48,6 @@ jobs:
           aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}

       - uses: depot/setup-action@v1
-        env:
-          DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
       - name: Get HEAD Commit ID
         run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
       - name: Define Branch Name
@@ -74,168 +65,78 @@ jobs:
           MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
           MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
           MSAAS_REPO_FOLDER: /tmp/msaas
-          SERVICES_INPUT: ${{ github.event.inputs.services }}
         run: |
-          #!/bin/bash
-          set -euo pipefail
-
-          # Configuration
-          readonly WORKING_DIR=$(pwd)
-          readonly BUILD_SCRIPT_NAME="build.sh"
-          readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
-
-          # Initialize git configuration
-          setup_git() {
-              git config --local user.email "action@github.com"
-              git config --local user.name "GitHub Action"
-              git checkout -b "$BRANCH_NAME"
-          }
-
-          # Get and increment image version
-          image_version() {
-              local service=$1
-              local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
-              local current_version new_version
-
-              current_version=$(yq eval '.AppVersion' "$chart_path")
-              new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
-              echo "$new_version"
-          }
-
-          # Clone MSAAS repository if not exists
-          clone_msaas() {
-              if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
-                  git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
-                  cd "$MSAAS_REPO_FOLDER"
-                  cd openreplay && git fetch origin && git checkout main
-                  git log -1
-                  cd "$MSAAS_REPO_FOLDER"
-                  bash git-init.sh
-                  git checkout
-              fi
-          }
-
-          # Build managed services
-          build_managed() {
-              local service=$1
-              local version=$2
-
-              echo "Building managed service: $service"
-              clone_msaas
-
-              if [[ $service == 'chalice' ]]; then
-                  cd "$MSAAS_REPO_FOLDER/openreplay/api"
-              else
-                  cd "$MSAAS_REPO_FOLDER/openreplay/$service"
-              fi
-
-              local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
-
-              echo "Executing: $build_cmd"
-              if ! eval "$build_cmd" 2>&1; then
-                  echo "Build failed for $service"
-                  exit 1
-              fi
-          }
-
-          # Build service with given arguments
-          build_service() {
-              local service=$1
-              local version=$2
-              local build_args=$3
-              local build_script=${4:-$BUILD_SCRIPT_NAME}
-
-              local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
-              echo "Executing: $command"
-              eval "$command"
-          }
-
-          # Update chart version and commit changes
-          update_chart_version() {
-              local service=$1
-              local version=$2
-              local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
-
-              # Ensure we're in the original working directory/repository
-              cd "$WORKING_DIR"
-              yq eval ".AppVersion = \"$version\"" -i "$chart_path"
-              git add "$chart_path"
-              git commit -m "Increment $service chart version to $version"
-              git push --set-upstream origin "$BRANCH_NAME"
-              cd -
-          }
-
-          # Main execution
-          main() {
-              setup_git
-
-              # Get backend services list
-              ls backend/cmd >"$BACKEND_SERVICES_FILE"
-
-              # Parse services input (fix for GitHub Actions syntax)
-              echo "Services: ${SERVICES_INPUT:-$1}"
-              IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
-
-              # Process each service
-              for service in "${services[@]}"; do
-                  echo "Processing service: $service"
-                  cd "$WORKING_DIR"
-
-                  local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
-
-                  # Determine build configuration based on service type
-                  if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
-                      # Backend service
-                      cd backend
-                      foss_build_args="nil $service"
-                      ee_build_args="ee $service"
-                  else
-                      # Non-backend service
-                      case "$service" in
-                      chalice | alerts | crons)
-                          cd "$WORKING_DIR/api"
-                          ;;
-                      *)
-                          cd "$service"
-                          ;;
-                      esac
-
-                      # Special build scripts for alerts/crons
-                      if [[ $service == 'alerts' || $service == 'crons' ]]; then
-                          build_script="build_${service}.sh"
-                      fi
-
-                      ee_build_args="ee"
-                  fi
-
-                  # Get version and build
-                  local version
-                  version=$(image_version "$service")
-
-                  # Build FOSS and EE versions
-                  build_service "$service" "$version" "$foss_build_args"
-                  build_service "$service" "${version}-ee" "$ee_build_args"
-
-                  # Build managed version for specific services
-                  if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
-                      echo "Nothing to build in managed for service $service"
-                  else
-                      build_managed "$service" "$version"
-                  fi
-
-                  # Update chart and commit
-                  update_chart_version "$service" "$version"
-              done
-              cd "$WORKING_DIR"
-
-              # Cleanup
-              rm -f "$BACKEND_SERVICES_FILE"
-          }
-
-          echo "Working directory: $WORKING_DIR"
-          # Run main function with all arguments
-          main "$SERVICES_INPUT"
+          set -exo pipefail
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action"
+          git checkout -b $BRANCH_NAME
+          working_dir=$(pwd)
+          function image_version(){
+            local service=$1
+            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
+            current_version=$(yq eval '.AppVersion' $chart_path)
+            new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
+            echo $new_version
+            # yq eval ".AppVersion = \"$new_version\"" -i $chart_path
+          }
+          function clone_msaas() {
+            [ -d $MSAAS_REPO_FOLDER ] || {
+              git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
+              cd $MSAAS_REPO_FOLDER
+              cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
+              git log -1
+              cd $MSAAS_REPO_FOLDER
+              bash git-init.sh
+              git checkout
+            }
+          }
+          function build_managed() {
+            local service=$1
+            local version=$2
+            echo building managed
+            clone_msaas
+            if [[ $service == 'chalice' ]]; then
+              cd $MSAAS_REPO_FOLDER/openreplay/api
+            else
+              cd $MSAAS_REPO_FOLDER/openreplay/$service
+            fi
+            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
+          }
+          # Checking for backend images
+          ls backend/cmd >> /tmp/backend.txt
+          echo Services: "${{ github.event.inputs.services }}"
+          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
+          BUILD_SCRIPT_NAME="build.sh"
+          # Build FOSS
+          for SERVICE in "${SERVICES[@]}"; do
+            # Check if service is backend
+            if grep -q $SERVICE /tmp/backend.txt; then
+              cd backend
+              foss_build_args="nil $SERVICE"
+              ee_build_args="ee $SERVICE"
+            else
+              [[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
+              [[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
+              ee_build_args="ee"
+            fi
+            version=$(image_version $SERVICE)
+            echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
+            IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
+            echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
+            IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
+            if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
+              IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
+              echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
+            else
+              build_managed $SERVICE $version
+            fi
+            cd $working_dir
+            chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
+            yq eval ".AppVersion = \"$version\"" -i $chart_path
+            git add $chart_path
+            git commit -m "Increment $SERVICE chart version"
+            git push --set-upstream origin $BRANCH_NAME
+          done

       - name: Create Pull Request
         uses: repo-sync/pull-request@v2
@@ -246,7 +147,8 @@ jobs:
           pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
           pr_body: |
             This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
-            Once this PR is merged, tag update job will run automatically.
+            Once this PR is merged, To update the latest tag, run the following workflow.
+            https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml

       # - name: Debug Job
       #   if: ${{ failure() }}
```
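Both versions of the build script route each requested service to a build directory: anything listed by `ls backend/cmd` builds from `backend/`, while `chalice`, `alerts`, and `crons` build from `api/`, and everything else from its own folder. A hedged Python sketch of that routing (the backend service names are placeholders, not the real output of `ls backend/cmd`):

```python
BACKEND_SERVICES = {"http", "sink", "storage"}  # placeholder for `ls backend/cmd`
API_SERVICES = {"chalice", "alerts", "crons"}


def build_dir(service: str) -> str:
    # Mirrors the grep/case dispatch in the workflow's run block.
    if service in BACKEND_SERVICES:
        return "backend"
    if service in API_SERVICES:
        return "api"
    return service  # every other service builds from its own folder


print([(s, build_dir(s)) for s in "chalice,frontend".split(",")])
# [('chalice', 'api'), ('frontend', 'frontend')]
```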
47 changes: .github/workflows/update-tag.yaml (vendored)

```diff
@@ -1,42 +1,35 @@
 on:
-  pull_request:
-    types: [closed]
-    branches:
-      - main
-name: Release tag update --force
+  workflow_dispatch:
+    description: "This workflow will build for patches for latest tag, and will Always use commit from main branch."
+    inputs:
+      services:
+        description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false"
+        required: true
+        default: "false"
+
+name: Force Push tag with main branch HEAD

 jobs:
   deploy:
     name: Build Patch from main
     runs-on: ubuntu-latest
-    if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
+    env:
+      DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
+      DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
     steps:
       - name: Checkout
         uses: actions/checkout@v2

-      - name: Get latest release tag using GitHub API
-        id: get-latest-tag
-        run: |
-          LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-            "https://api.github.com/repos/${{ github.repository }}/releases/latest" \
-            | jq -r .tag_name)
-
-          # Fallback to git command if API doesn't return a tag
-          if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
-            echo "Not found latest tag"
-            exit 100
-          fi
-
-          echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
-          echo "Latest tag: $LATEST_TAG"
-
       - name: Set Remote with GITHUB_TOKEN
         run: |
           git config --unset http.https://github.com/.extraheader
-          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
+          git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git

       - name: Push main branch to tag
         run: |
+          git fetch --tags
           git checkout main
-          echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
-          git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
+          git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force
+      # - name: Debug Job
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+      #   with:
+      #     limit-access-to-actor: true
```
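The new workflow resolves the tag to move with `git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1`, i.e. the highest version-sorted `v*` tag. The same selection in Python, assuming plain `vMAJOR.MINOR.PATCH` tags (the sample tags are illustrative):

```python
tags = ["v1.9.0", "v1.10.2", "v1.10.1"]  # illustrative; real input is `git tag --list`

# Version-aware sort: compare numeric components, not the raw strings.
latest = max(tags, key=lambda t: tuple(int(p) for p in t.lstrip("v").split(".")))
print(latest)  # v1.10.2
```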
11 changes: api/Pipfile

```diff
@@ -6,16 +6,15 @@ name = "pypi"
 [packages]
 urllib3 = "==2.3.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.37.16"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
 clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+elasticsearch = "==8.17.2"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
+cachetools = "==5.5.2"
-fastapi = "==0.115.8"
+fastapi = "==0.115.11"
 uvicorn = {extras = ["standard"], version = "==0.34.0"}
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.6"}
```
```diff
@@ -16,7 +16,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, core_dynamic_crons
 from routers import core, core_dynamic
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics

 loglevel = config("LOGLEVEL", default=logging.WARNING)
 print(f">Loglevel set to: {loglevel}")
@@ -129,6 +129,6 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)

-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/pa")
+app.include_router(product_analytics.app, prefix="/pa")
+app.include_router(product_analytics.app_apikey, prefix="/pa")
```
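Besides fixing the `product_anaytics` module-name typo, the new registration mounts the routers under a `/pa` prefix. With FastAPI's `include_router`, the prefix is prepended to every route on the router; a small self-contained illustration (the `/events` path is ours, not the repo's):

```python
from fastapi import APIRouter, FastAPI

app = FastAPI()
router = APIRouter()


@router.get("/events")  # illustrative route
def list_events():
    return []


app.include_router(router, prefix="/pa")  # now served as GET /pa/events
```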
```diff
@@ -85,8 +85,7 @@ def __generic_query(typename, value_length=None):
                 ORDER BY value"""

     if value_length is None or value_length > 2:
-        return f"""SELECT DISTINCT ON(value,type) value, type
-                   ((SELECT DISTINCT value, type
+        return f"""(SELECT DISTINCT value, type
                     FROM {TABLE}
                     WHERE
                       project_id = %(project_id)s
@@ -102,7 +101,7 @@ def __generic_query(typename, value_length=None):
                       AND type='{typename.upper()}'
                       AND value ILIKE %(value)s
                     ORDER BY value
-                    LIMIT 5)) AS raw;"""
+                    LIMIT 5);"""
     return f"""SELECT DISTINCT value, type
                FROM {TABLE}
                WHERE
@@ -327,7 +326,7 @@ def __search_metadata(project_id, value, key=None, source=None):
                                 AND {colname} ILIKE %(svalue)s LIMIT 5)""")
     with pg_client.PostgresClient() as cur:
         cur.execute(cur.mogrify(f"""\
-SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
+SELECT key, value, 'METADATA' AS TYPE
 FROM({" UNION ALL ".join(sub_from)}) AS all_metas
 LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
               "svalue": helper.string_to_sql_like("^" + value)}))
```
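The `%(name)s` placeholders in these queries are psycopg2 named parameters; `cur.mogrify` returns the final SQL with the values safely quoted, which is what then gets executed. A sketch of the binding (the DSN is illustrative, and this assumes a reachable Postgres):

```python
import psycopg2

conn = psycopg2.connect("dbname=test")  # illustrative connection string
with conn.cursor() as cur:
    # mogrify only renders the statement; it does not execute it.
    sql = cur.mogrify("SELECT %(value)s::text;", {"value": "sig%"})
    print(sql)  # b"SELECT 'sig%'::text;"
```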
```diff
@@ -338,14 +338,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
                 SELECT details.error_id as error_id,
                        name, message, users, total,
                        sessions, last_occurrence, first_occurrence, chart
-                FROM (SELECT error_id,
+                FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
                              JSONExtractString(toString(`$properties`), 'name') AS name,
                              JSONExtractString(toString(`$properties`), 'message') AS message,
                              COUNT(DISTINCT user_id) AS users,
                              COUNT(DISTINCT events.session_id) AS sessions,
                              MAX(created_at) AS max_datetime,
                              MIN(created_at) AS min_datetime,
-                             COUNT(DISTINCT error_id)
+                             COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id'))
                              OVER() AS total
                       FROM {MAIN_EVENTS_TABLE} AS events
                            INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
@@ -357,7 +357,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
                       GROUP BY error_id, name, message
                       ORDER BY {sort} {order}
                       LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
-                     INNER JOIN (SELECT error_id,
+                     INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
                                         toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
                                         toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
                                  FROM {MAIN_EVENTS_TABLE}
@@ -366,7 +366,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
                                  GROUP BY error_id) AS time_details
                                 ON details.error_id=time_details.error_id
                      INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
-                                 FROM (SELECT error_id,
+                                 FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
                                               gs.generate_series AS timestamp,
                                               COUNT(DISTINCT session_id) AS count
                                        FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
```
```diff
@@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
             cur.execute(
                 cur.mogrify(
                     """SELECT username, token, url
                        FROM public.jira_cloud
-                       WHERE user_id = %(user_id)s;""",
+                       WHERE user_id=%(user_id)s;""",
                     {"user_id": self._user_id})
             )
             data = helper.dict_to_camel_case(cur.fetchone())
@@ -95,9 +95,10 @@ class JIRAIntegration(base.BaseIntegration):
     def add(self, username, token, url, obfuscate=False):
         with pg_client.PostgresClient() as cur:
             cur.execute(
-                cur.mogrify(""" \
-                    INSERT INTO public.jira_cloud(username, token, user_id, url)
-                    VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
+                cur.mogrify("""\
+                    INSERT INTO public.jira_cloud(username, token, user_id,url)
+                    VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
+                    RETURNING username, token, url;""",
                             {"user_id": self._user_id, "username": username,
                              "token": token, "url": url})
             )
@@ -111,10 +112,9 @@ class JIRAIntegration(base.BaseIntegration):
     def delete(self):
         with pg_client.PostgresClient() as cur:
             cur.execute(
-                cur.mogrify(""" \
-                    DELETE
-                    FROM public.jira_cloud
-                    WHERE user_id = %(user_id)s;""",
+                cur.mogrify("""\
+                    DELETE FROM public.jira_cloud
+                    WHERE user_id=%(user_id)s;""",
                             {"user_id": self._user_id})
             )
             return {"state": "success"}
@@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
             changes={
                 "username": data.username,
                 "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
-                    else self.integration["token"],
+                    else self.integration.token,
                 "url": str(data.url)
             },
             obfuscate=True
```
```diff
@@ -85,9 +85,6 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
 # compute avg_time_from_previous at the same level as sessions_count (this was removed in v1.22)
 # if start-point is selected, the selected event is ranked n°1
 def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
-    if not data.hide_excess:
-        data.hide_excess = True
-        data.rows = 50
     sub_events = []
     start_points_conditions = []
     step_0_conditions = []
```
```diff
@@ -1,14 +0,0 @@
-from chalicelib.utils.ch_client import ClickHouseClient
-
-
-def search_events(project_id: int, data: dict):
-    with ClickHouseClient() as ch_client:
-        r = ch_client.format(
-            """SELECT *
-               FROM taha.events
-               WHERE project_id=%(project_id)s
-               ORDER BY created_at;""",
-            params={"project_id": project_id})
-        x = ch_client.execute(r)
-
-        return x
```
0 changes: api/chalicelib/core/product_analytics/__init__.py (new, empty file)
108 changes: api/chalicelib/core/product_analytics/events.py (new file)

```diff
@@ -0,0 +1,108 @@
+import logging
+
+import schemas
+from chalicelib.utils import helper
+from chalicelib.utils import sql_helper as sh
+from chalicelib.utils.ch_client import ClickHouseClient
+
+logger = logging.getLogger(__name__)
+
+
+def get_events(project_id: int):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT event_name, display_name
+               FROM product_analytics.all_events
+               WHERE project_id=%(project_id)s
+               ORDER BY display_name;""",
+            parameters={"project_id": project_id})
+        x = ch_client.execute(r)
+
+    return helper.list_to_camel_case(x)
+
+
+def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema):
+    with ClickHouseClient() as ch_client:
+        full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
+                     "projectId": project_id, "limit": data.limit, "offset": (data.page - 1) * data.limit}
+
+        constraints = ["project_id = %(projectId)s",
+                       "created_at >= toDateTime(%(startDate)s/1000)",
+                       "created_at <= toDateTime(%(endDate)s/1000)"]
+        for i, f in enumerate(data.filters):
+            f.value = helper.values_for_operator(value=f.value, op=f.operator)
+            f_k = f"f_value{i}"
+            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
+            op = sh.get_sql_operator(f.operator)
+            is_any = sh.isAny_opreator(f.operator)
+            is_undefined = sh.isUndefined_operator(f.operator)
+            full_args = {**full_args, f_k: sh.single_value(f.value), **sh.multi_values(f.value, value_key=f_k)}
+            if f.is_predefined:
+                column = f.name
+            else:
+                column = f"properties.{f.name}"
+
+            if is_any:
+                condition = f"isNotNull({column})"
+            elif is_undefined:
+                condition = f"isNull({column})"
+            else:
+                condition = sh.multi_conditions(f"{column} {op} %({f_k})s", f.value, value_key=f_k)
+            constraints.append(condition)
+
+        ev_constraints = []
+        for i, e in enumerate(data.events):
+            e_k = f"e_value{i}"
+            full_args = {**full_args, e_k: e.event_name}
+            condition = f"`$event_name` = %({e_k})s"
+            sub_conditions = []
+            if len(e.properties.filters) > 0:
+                for j, f in enumerate(e.properties.filters):
+                    p_k = f"e_{i}_p_{j}"
+                    full_args = {**full_args, **sh.multi_values(f.value, value_key=p_k)}
+                    if f.is_predefined:
+                        sub_condition = f"{f.name} {op} %({p_k})s"
+                    else:
+                        sub_condition = f"properties.{f.name} {op} %({p_k})s"
+                    sub_conditions.append(sh.multi_conditions(sub_condition, f.value, value_key=p_k))
+            if len(sub_conditions) > 0:
+                condition += " AND ("
+                for j, c in enumerate(sub_conditions):
+                    if j > 0:
+                        condition += " " + e.properties.operators[j - 1] + " " + c
+                    else:
+                        condition += c
+                condition += ")"
+
+            ev_constraints.append(condition)
+
+        constraints.append("(" + " OR ".join(ev_constraints) + ")")
+        query = ch_client.format(
+            f"""SELECT COUNT(1) OVER () AS total,
+                       event_id,
+                       `$event_name`,
+                       created_at,
+                       `distinct_id`,
+                       `$browser`,
+                       `$import`,
+                       `$os`,
+                       `$country`,
+                       `$state`,
+                       `$city`,
+                       `$screen_height`,
+                       `$screen_width`,
+                       `$source`,
+                       `$user_id`,
+                       `$device`
+                FROM product_analytics.events
+                WHERE {" AND ".join(constraints)}
+                ORDER BY created_at
+                LIMIT %(limit)s OFFSET %(offset)s;""",
+            parameters=full_args)
+        rows = ch_client.execute(query)
+        if len(rows) == 0:
+            return {"total": 0, "rows": [], "src": 2}
+        total = rows[0]["total"]
+        for r in rows:
+            r.pop("total")
+        return {"total": total, "rows": rows, "src": 2}
```
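A hedged usage sketch for the new module: the call shape follows the signatures above, while the project id and the response contents are illustrative.

```python
from chalicelib.core.product_analytics import events

# get_events returns camelCased rows from product_analytics.all_events.
rows = events.get_events(project_id=1)
# e.g. [{"eventName": "click", "displayName": "Click"}]  (illustrative values)
```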
19 changes: api/chalicelib/core/product_analytics/properties.py (new file)

```diff
@@ -0,0 +1,19 @@
+from chalicelib.utils import helper
+from chalicelib.utils.ch_client import ClickHouseClient
+
+
+def get_properties(project_id: int, event_name):
+    with ClickHouseClient() as ch_client:
+        r = ch_client.format(
+            """SELECT all_properties.property_name,
+                      all_properties.display_name
+               FROM product_analytics.event_properties
+                    INNER JOIN product_analytics.all_properties USING (property_name)
+               WHERE event_properties.project_id=%(project_id)s
+                 AND all_properties.project_id=%(project_id)s
+                 AND event_properties.event_name=%(event_name)s
+               ORDER BY created_at;""",
+            parameters={"project_id": project_id,"event_name": event_name})
+        properties = ch_client.execute(r)
+
+        return helper.list_to_camel_case(properties)
```
```diff
@@ -6,8 +6,18 @@ logger = logging.getLogger(__name__)
 from . import sessions_pg
 from . import sessions_pg as sessions_legacy
 from . import sessions_ch
+from . import sessions_search_pg
+from . import sessions_search_pg as sessions_search_legacy

-if config("EXP_METRICS", cast=bool, default=False):
+if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
+    logger.info(">>> Using experimental sessions search")
     from . import sessions_ch as sessions
+    from . import sessions_search_ch as sessions_search
 else:
     from . import sessions_pg as sessions
+    from . import sessions_search_pg as sessions_search
+
+# if config("EXP_METRICS", cast=bool, default=False):
+#     from . import sessions_ch as sessions
+# else:
+#     from . import sessions_pg as sessions
```
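The module now picks its sessions-search implementation once, at import time, from the `EXP_SESSIONS_SEARCH` flag (read via python-decouple, as elsewhere in the API). A sketch of the toggle:

```python
import os

os.environ["EXP_SESSIONS_SEARCH"] = "true"  # set before the package is imported

from decouple import config

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    print("sessions_search_ch selected")  # ClickHouse-backed search
else:
    print("sessions_search_pg selected")  # Postgres-backed search
```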
```diff
@@ -153,7 +153,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": e.filters
+                    "filters": []
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
@@ -178,7 +178,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": e.filters
+                    "filters": []
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
```
@ -671,24 +671,36 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
||||||
events_conditions.append({"type": event_where[-1]})
|
events_conditions.append({"type": event_where[-1]})
|
||||||
if not is_any:
|
if not is_any:
|
||||||
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
if schemas.ClickEventExtraOperator.has_value(event.operator):
|
||||||
event_where.append(json_condition(
|
# event_where.append(json_condition(
|
||||||
"main",
|
# "main",
|
||||||
"$properties",
|
# "$properties",
|
||||||
"selector", op, event.value, e_k)
|
# "selector", op, event.value, e_k)
|
||||||
|
# )
|
||||||
|
event_where.append(
|
||||||
|
sh.multi_conditions(f"main.`$properties`.selector {op} %({e_k})s",
|
||||||
|
event.value, value_key=e_k)
|
||||||
)
|
)
|
||||||
events_conditions[-1]["condition"] = event_where[-1]
|
events_conditions[-1]["condition"] = event_where[-1]
|
||||||
else:
|
else:
|
||||||
if is_not:
|
if is_not:
|
||||||
event_where.append(json_condition(
|
# event_where.append(json_condition(
|
||||||
"sub", "$properties", _column, op, event.value, e_k
|
# "sub", "$properties", _column, op, event.value, e_k
|
||||||
))
|
# ))
|
||||||
|
event_where.append(
|
||||||
|
sh.multi_conditions(f"sub.`$properties`.{_column} {op} %({e_k})s",
|
||||||
|
event.value, value_key=e_k)
|
||||||
|
)
|
||||||
events_conditions_not.append(
|
events_conditions_not.append(
|
||||||
{
|
{
|
||||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||||
events_conditions_not[-1]["condition"] = event_where[-1]
|
events_conditions_not[-1]["condition"] = event_where[-1]
|
||||||
else:
|
else:
|
||||||
|
# event_where.append(
|
||||||
|
# json_condition("main", "$properties", _column, op, event.value, e_k)
|
||||||
|
# )
|
||||||
event_where.append(
|
event_where.append(
|
||||||
json_condition("main", "$properties", _column, op, event.value, e_k)
|
sh.multi_conditions(f"main.`$properties`.{_column} {op} %({e_k})s",
|
||||||
|
event.value, value_key=e_k)
|
||||||
)
|
)
|
||||||
events_conditions[-1]["condition"] = event_where[-1]
|
events_conditions[-1]["condition"] = event_where[-1]
|
||||||
else:
|
else:
|
||||||
|
|
@ -870,12 +882,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
||||||
events_conditions[-1]["condition"] = []
|
events_conditions[-1]["condition"] = []
|
||||||
if not is_any and event.value not in [None, "*", ""]:
|
if not is_any and event.value not in [None, "*", ""]:
|
||||||
event_where.append(
|
event_where.append(
|
||||||
sh.multi_conditions(f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
sh.multi_conditions(
|
||||||
event.value, value_key=e_k))
|
f"(toString(main1.`$properties`.message) {op} %({e_k})s OR toString(main1.`$properties`.name) {op} %({e_k})s)",
|
||||||
|
event.value, value_key=e_k))
|
||||||
events_conditions[-1]["condition"].append(event_where[-1])
|
events_conditions[-1]["condition"].append(event_where[-1])
|
||||||
events_extra_join += f" AND {event_where[-1]}"
|
events_extra_join += f" AND {event_where[-1]}"
|
||||||
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
|
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
|
||||||
event_where.append(sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source, value_key=s_k))
|
event_where.append(
|
||||||
|
sh.multi_conditions(f"toString(main1.`$properties`.source) = %({s_k})s", event.source,
|
||||||
|
value_key=s_k))
|
||||||
events_conditions[-1]["condition"].append(event_where[-1])
|
events_conditions[-1]["condition"].append(event_where[-1])
|
||||||
events_extra_join += f" AND {event_where[-1]}"
|
events_extra_join += f" AND {event_where[-1]}"
|
||||||
|
|
||||||
|
|
@ -1108,12 +1123,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
||||||
is_any = sh.isAny_opreator(f.operator)
|
is_any = sh.isAny_opreator(f.operator)
|
||||||
if is_any or len(f.value) == 0:
|
if is_any or len(f.value) == 0:
|
||||||
continue
|
continue
|
||||||
is_negative_operator = sh.is_negation_operator(f.operator)
|
|
||||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||||
op = sh.get_sql_operator(f.operator)
|
op = sh.get_sql_operator(f.operator)
|
||||||
r_op = ""
|
|
||||||
if is_negative_operator:
|
|
||||||
r_op = sh.reverse_sql_operator(op)
|
|
||||||
e_k_f = e_k + f"_fetch{j}"
|
e_k_f = e_k + f"_fetch{j}"
|
||||||
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
||||||
if f.type == schemas.FetchFilterType.FETCH_URL:
|
if f.type == schemas.FetchFilterType.FETCH_URL:
|
||||||
|
|
@@ -1122,12 +1133,6 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     ))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
-                    if is_negative_operator:
-                        events_conditions_not.append(
-                            {
-                                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
-                        events_conditions_not[-1]["condition"] = sh.multi_conditions(
-                            f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
                 elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
                     event_where.append(json_condition(
                         "main", "$properties", 'status', op, f.value, e_k_f, True, True
@@ -1140,13 +1145,6 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     ))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
-                    if is_negative_operator:
-                        events_conditions_not.append(
-                            {
-                                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
-                        events_conditions_not[-1]["condition"] = sh.multi_conditions(
-                            f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
-                            value_key=e_k_f)
                 elif f.type == schemas.FetchFilterType.FETCH_DURATION:
                     event_where.append(
                         sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,
@@ -1159,26 +1157,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     ))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
-                    if is_negative_operator:
-                        events_conditions_not.append(
-                            {
-                                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
-                        events_conditions_not[-1]["condition"] = sh.multi_conditions(
-                            f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
-                            value_key=e_k_f)
                 elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
                     event_where.append(json_condition(
                         "main", "$properties", 'response_body', op, f.value, e_k_f
                     ))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
-                    if is_negative_operator:
-                        events_conditions_not.append(
-                            {
-                                "type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
-                        events_conditions_not[-1]["condition"] = sh.multi_conditions(
-                            f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
-                            value_key=e_k_f)
                 else:
                     logging.warning(f"undefined FETCH filter: {f.type}")
             if not apply:
@@ -1224,6 +1208,28 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
             else:
                 continue
+            if event.properties is not None and len(event.properties.filters) > 0:
+                event_fiters = []
+                for l, property in enumerate(event.properties.filters):
+                    a_k = f"{e_k}_att_{l}"
+                    full_args = {**full_args,
+                                 **sh.multi_values(property.value, value_key=a_k)}
+                    op = sh.get_sql_operator(property.operator)
+                    condition = f"main.properties.{property.name} {op} %({a_k})s"
+                    if property.is_predefined:
+                        condition = f"main.{property.name} {op} %({a_k})s"
+                    event_where.append(
+                        sh.multi_conditions(condition, property.value, value_key=a_k)
+                    )
+                    event_fiters.append(event_where[-1])
+                if len(event_fiters) > 0:
+                    events_conditions[-1]["condition"] += " AND ("
+                    for l, e_f in enumerate(event_fiters):
+                        if l > 0:
+                            events_conditions[-1]["condition"] += event.properties.operators[l - 1] + e_f
+                        else:
+                            events_conditions[-1]["condition"] += e_f
+                    events_conditions[-1]["condition"] += ")"
             if event_index == 0 or or_events:
                 event_where += ss_constraints
             if is_not:
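The block added above folds each event's property filters into that event's condition: every filter is expanded through sh.multi_conditions, and consecutive filters are joined with the user-supplied and/or connectives inside one AND-ed group. Below is a minimal, self-contained sketch of that composition logic; multi_conditions here is a simplified stand-in for the sh helper, and the SQL fragments are illustrative, not the exact strings the backend emits.

def multi_conditions(condition, values, value_key):
    # OR together one parameterized copy of the condition per value,
    # mirroring how the sh.multi_conditions helper fans out placeholders.
    return "(" + " OR ".join(
        condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
        for i in range(len(values))) + ")"


def append_property_filters(event_condition, filters, operators):
    # filters: [{"condition": str, "values": list, "key": str}]
    # operators: "and"/"or" connectives, one per gap between filters.
    event_filters = [multi_conditions(f["condition"], f["values"], f["key"])
                     for f in filters]
    if not event_filters:
        return event_condition
    event_condition += " AND ("
    for i, e_f in enumerate(event_filters):
        if i > 0:
            event_condition += f" {operators[i - 1].upper()} {e_f}"
        else:
            event_condition += e_f
    return event_condition + ")"


# Two property filters joined with OR, all wrapped in one AND group:
print(append_property_filters(
    "main.`$event_name` = 'CLICK'",
    [{"condition": "main.properties.label = %(att_0)s", "values": ["Buy"], "key": "att_0"},
     {"condition": "main.properties.page = %(att_1)s", "values": ["/home", "/cart"], "key": "att_1"}],
    operators=["or"]))

The diff itself concatenates the connective without spacing or casing; the sketch adds both purely for readability.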
@@ -1426,30 +1432,17 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     if extra_conditions and len(extra_conditions) > 0:
         _extra_or_condition = []
         for i, c in enumerate(extra_conditions):
-            if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
+            if sh.isAny_opreator(c.operator):
                 continue
             e_k = f"ec_value{i}"
             op = sh.get_sql_operator(c.operator)
             c.value = helper.values_for_operator(value=c.value, op=c.operator)
             full_args = {**full_args,
                          **sh.multi_values(c.value, value_key=e_k)}
-            if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
+            if c.type == events.EventType.LOCATION.ui_type:
                 _extra_or_condition.append(
                     sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
                                         c.value, value_key=e_k))
-            elif c.type == schemas.EventType.REQUEST_DETAILS.value:
-                for j, c_f in enumerate(c.filters):
-                    if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
-                        continue
-                    e_k += f"_{j}"
-                    op = sh.get_sql_operator(c_f.operator)
-                    c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
-                    full_args = {**full_args,
-                                 **sh.multi_values(c_f.value, value_key=e_k)}
-                    if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
-                        _extra_or_condition.append(
-                            sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
-                                                c_f.value, value_key=e_k))
             else:
                 logging.warning(f"unsupported extra_event type:${c.type}")
         if len(_extra_or_condition) > 0:
@@ -1,6 +1,5 @@
 import ast
 import logging
-from typing import List, Union
 
 import schemas
 from chalicelib.core import events, metadata, projects
@@ -148,7 +148,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": e.filters
+                    "filters": []
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
@@ -165,7 +165,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
                     "isEvent": True,
                     "value": [],
                     "operator": e.operator,
-                    "filters": e.filters
+                    "filters": []
                 })
             for v in e.value:
                 if v not in extra_conditions[e.operator].value:
@@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
                                         c.value, value_key=e_k))
             else:
-                logger.warning(f"unsupported extra_event type: {c.type}")
+                logger.warning(f"unsupported extra_event type:${c.type}")
         if len(_extra_or_condition) > 0:
             extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
     query_part = f"""\
@@ -175,11 +175,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
                                    ORDER BY sort_key {data.order}
                                    LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""",
                                parameters=full_args)
-        logging.debug("--------------------")
-        logging.debug(main_query)
-        logging.debug("--------------------")
         try:
+            logging.debug("--------------------")
            sessions_list = cur.execute(main_query)
+            logging.debug("--------------------")
        except Exception as err:
            logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
            logging.warning(main_query)
@@ -11,9 +11,3 @@ if smtp.has_smtp():
     logger.info("valid SMTP configuration found")
 else:
     logger.info("no SMTP configuration found or SMTP validation failed")
 
-if config("EXP_CH_DRIVER", cast=bool, default=True):
-    logging.info(">>> Using new CH driver")
-    from . import ch_client_exp as ch_client
-else:
-    from . import ch_client
@@ -1,73 +1,185 @@
 import logging
+import threading
+import time
+from functools import wraps
+from queue import Queue, Empty
 
-import clickhouse_driver
+import clickhouse_connect
+from clickhouse_connect.driver.query import QueryContext
 from decouple import config
 
 logger = logging.getLogger(__name__)
 
+_CH_CONFIG = {"host": config("ch_host"),
+              "user": config("ch_user", default="default"),
+              "password": config("ch_password", default=""),
+              "port": config("ch_port_http", cast=int),
+              "client_name": config("APP_NAME", default="PY")}
+CH_CONFIG = dict(_CH_CONFIG)
+
 settings = {}
 if config('ch_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
+    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
     settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
 
 if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logger.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
+    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
     settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
 
+extra_args = {}
+if config("CH_COMPRESSION", cast=bool, default=True):
+    extra_args["compression"] = "lz4"
+
+
+def transform_result(self, original_function):
+    @wraps(original_function)
+    def wrapper(*args, **kwargs):
+        if kwargs.get("parameters"):
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters")))
+            else:
+                logger.debug(
+                    str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
+        elif len(args) > 0:
+            if config("LOCAL_DEV", cast=bool, default=False):
+                logger.debug(args[0])
+            else:
+                logger.debug(str.encode(args[0]))
+        result = original_function(*args, **kwargs)
+        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
+            column_names = result.column_names
+            result = result.result_rows
+            result = [dict(zip(column_names, row)) for row in result]
+
+        return result
+
+    return wrapper
+
+
+class ClickHouseConnectionPool:
+    def __init__(self, min_size, max_size):
+        self.min_size = min_size
+        self.max_size = max_size
+        self.pool = Queue()
+        self.lock = threading.Lock()
+        self.total_connections = 0
+
+        # Initialize the pool with min_size connections
+        for _ in range(self.min_size):
+            client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                   database=config("ch_database", default="default"),
+                                                   settings=settings,
+                                                   **extra_args)
+            self.pool.put(client)
+            self.total_connections += 1
+
+    def get_connection(self):
+        try:
+            # Try to get a connection without blocking
+            client = self.pool.get_nowait()
+            return client
+        except Empty:
+            with self.lock:
+                if self.total_connections < self.max_size:
+                    client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                           database=config("ch_database", default="default"),
+                                                           settings=settings,
+                                                           **extra_args)
+                    self.total_connections += 1
+                    return client
+            # If max_size reached, wait until a connection is available
+            client = self.pool.get()
+            return client
+
+    def release_connection(self, client):
+        self.pool.put(client)
+
+    def close_all(self):
+        with self.lock:
+            while not self.pool.empty():
+                client = self.pool.get()
+                client.close()
+            self.total_connections = 0
+
+
+CH_pool: ClickHouseConnectionPool = None
+
+RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
+RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
+RETRY = 0
+
+
+def make_pool():
+    if not config('CH_POOL', cast=bool, default=True):
+        return
+    global CH_pool
+    global RETRY
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
+    try:
+        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
+                                           max_size=config("CH_MAXCONN", cast=int, default=8))
+        if CH_pool is not None:
+            logger.info("Connection pool created successfully for CH")
+    except ConnectionError as error:
+        logger.error("Error while connecting to CH", exc_info=error)
+        if RETRY < RETRY_MAX:
+            RETRY += 1
+            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+            time.sleep(RETRY_INTERVAL)
+            make_pool()
+        else:
+            raise error
+
+
 class ClickHouseClient:
     __client = None
 
     def __init__(self, database=None):
-        extra_args = {}
-        if config("CH_COMPRESSION", cast=bool, default=True):
-            extra_args["compression"] = "lz4"
-        self.__client = clickhouse_driver.Client(host=config("ch_host"),
-                                                 database=database if database else config("ch_database",
-                                                                                           default="default"),
-                                                 user=config("ch_user", default="default"),
-                                                 password=config("ch_password", default=""),
-                                                 port=config("ch_port", cast=int),
-                                                 settings=settings,
-                                                 **extra_args) \
-            if self.__client is None else self.__client
+        if self.__client is None:
+            if database is not None or not config('CH_POOL', cast=bool, default=True):
+                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
+                                                              database=database if database else config("ch_database",
+                                                                                                        default="default"),
+                                                              settings=settings,
+                                                              **extra_args)
+            else:
+                self.__client = CH_pool.get_connection()
+
+        self.__client.execute = transform_result(self, self.__client.query)
+        self.__client.format = self.format
 
     def __enter__(self):
-        return self
-
-    def execute(self, query, parameters=None, **args):
-        try:
-            results = self.__client.execute(query=query, params=parameters, with_column_types=True, **args)
-            keys = tuple(x for x, y in results[1])
-            return [dict(zip(keys, i)) for i in results[0]]
-        except Exception as err:
-            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
-            logger.error("--------- CH QUERY EXCEPTION -----------")
-            logger.error(self.format(query=query, parameters=parameters)
-                         .replace('\n', '\\n')
-                         .replace(' ', ' ')
-                         .replace(' ', ' '))
-            logger.error("--------------------")
-            raise err
-
-    def insert(self, query, params=None, **args):
-        return self.__client.execute(query=query, params=params, **args)
-
-    def client(self):
         return self.__client
 
-    def format(self, query, parameters):
-        if parameters is None:
-            return query
-        return self.__client.substitute_params(query, parameters, self.__client.connection.context)
+    def format(self, query, parameters=None):
+        if parameters:
+            ctx = QueryContext(query=query, parameters=parameters)
+            return ctx.final_query
+        return query
 
     def __exit__(self, *args):
-        pass
+        if config('CH_POOL', cast=bool, default=True):
+            CH_pool.release_connection(self.__client)
+        else:
+            self.__client.close()
 
 
 async def init():
-    logger.info(f">CH_POOL:not defined")
+    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
+    if config('CH_POOL', cast=bool, default=True):
+        make_pool()
 
 
 async def terminate():
-    pass
+    global CH_pool
+    if CH_pool is not None:
+        try:
+            CH_pool.close_all()
+            logger.info("Closed all connexions to CH")
+        except Exception as error:
+            logger.error("Error while closing all connexions to CH", exc_info=error)
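The rewritten client above keeps one process-wide clickhouse-connect pool and monkey-patches the raw client so that execute() returns rows as dicts. A usage sketch follows, under the same configuration assumptions as the module itself (ch_host, ch_port_http and the CH_* settings resolved by decouple); the query is illustrative.

import asyncio

from chalicelib.utils import ch_client


async def main():
    await ch_client.init()               # builds the pool when CH_POOL=true
    with ch_client.ClickHouseClient() as cur:
        # execute() is the wrapped clickhouse-connect query(): list[dict] rows
        rows = cur.execute(query="SELECT 1 AS one")
        print(rows)                      # [{'one': 1}]
        # format() renders the final parameterized query, handy for logging
        print(cur.format(query="SELECT %(x)s AS x", parameters={"x": 42}))
    await ch_client.terminate()          # closes every pooled connection


asyncio.run(main())

Leaving the with block releases the connection back to the pool instead of closing it, so CH_MINCONN/CH_MAXCONN bound the concurrency.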
@@ -1,178 +0,0 @@
-import logging
-import threading
-import time
-from functools import wraps
-from queue import Queue, Empty
-
-import clickhouse_connect
-from clickhouse_connect.driver.query import QueryContext
-from decouple import config
-
-logger = logging.getLogger(__name__)
-
-_CH_CONFIG = {"host": config("ch_host"),
-              "user": config("ch_user", default="default"),
-              "password": config("ch_password", default=""),
-              "port": config("ch_port_http", cast=int),
-              "client_name": config("APP_NAME", default="PY")}
-CH_CONFIG = dict(_CH_CONFIG)
-
-settings = {}
-if config('ch_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s")
-    settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)}
-
-if config('ch_receive_timeout', cast=int, default=-1) > 0:
-    logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s")
-    settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)}
-
-extra_args = {}
-if config("CH_COMPRESSION", cast=bool, default=True):
-    extra_args["compression"] = "lz4"
-
-
-def transform_result(self, original_function):
-    @wraps(original_function)
-    def wrapper(*args, **kwargs):
-        if kwargs.get("parameters"):
-            logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
-        elif len(args) > 0:
-            logger.debug(str.encode(args[0]))
-        result = original_function(*args, **kwargs)
-        if isinstance(result, clickhouse_connect.driver.query.QueryResult):
-            column_names = result.column_names
-            result = result.result_rows
-            result = [dict(zip(column_names, row)) for row in result]
-
-        return result
-
-    return wrapper
-
-
-class ClickHouseConnectionPool:
-    def __init__(self, min_size, max_size):
-        self.min_size = min_size
-        self.max_size = max_size
-        self.pool = Queue()
-        self.lock = threading.Lock()
-        self.total_connections = 0
-
-        # Initialize the pool with min_size connections
-        for _ in range(self.min_size):
-            client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                   database=config("ch_database", default="default"),
-                                                   settings=settings,
-                                                   **extra_args)
-            self.pool.put(client)
-            self.total_connections += 1
-
-    def get_connection(self):
-        try:
-            # Try to get a connection without blocking
-            client = self.pool.get_nowait()
-            return client
-        except Empty:
-            with self.lock:
-                if self.total_connections < self.max_size:
-                    client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                           database=config("ch_database", default="default"),
-                                                           settings=settings,
-                                                           **extra_args)
-                    self.total_connections += 1
-                    return client
-            # If max_size reached, wait until a connection is available
-            client = self.pool.get()
-            return client
-
-    def release_connection(self, client):
-        self.pool.put(client)
-
-    def close_all(self):
-        with self.lock:
-            while not self.pool.empty():
-                client = self.pool.get()
-                client.close()
-            self.total_connections = 0
-
-
-CH_pool: ClickHouseConnectionPool = None
-
-RETRY_MAX = config("CH_RETRY_MAX", cast=int, default=50)
-RETRY_INTERVAL = config("CH_RETRY_INTERVAL", cast=int, default=2)
-RETRY = 0
-
-
-def make_pool():
-    if not config('CH_POOL', cast=bool, default=True):
-        return
-    global CH_pool
-    global RETRY
-    if CH_pool is not None:
-        try:
-            CH_pool.close_all()
-        except Exception as error:
-            logger.error("Error while closing all connexions to CH", exc_info=error)
-    try:
-        CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
-                                           max_size=config("CH_MAXCONN", cast=int, default=8))
-        if CH_pool is not None:
-            logger.info("Connection pool created successfully for CH")
-    except ConnectionError as error:
-        logger.error("Error while connecting to CH", exc_info=error)
-        if RETRY < RETRY_MAX:
-            RETRY += 1
-            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
-            time.sleep(RETRY_INTERVAL)
-            make_pool()
-        else:
-            raise error
-
-
-class ClickHouseClient:
-    __client = None
-
-    def __init__(self, database=None):
-        if self.__client is None:
-            if database is not None or not config('CH_POOL', cast=bool, default=True):
-                self.__client = clickhouse_connect.get_client(**CH_CONFIG,
-                                                              database=database if database else config("ch_database",
-                                                                                                        default="default"),
-                                                              settings=settings,
-                                                              **extra_args)
-            else:
-                self.__client = CH_pool.get_connection()
-
-        self.__client.execute = transform_result(self, self.__client.query)
-        self.__client.format = self.format
-
-    def __enter__(self):
-        return self.__client
-
-    def format(self, query, parameters=None):
-        if parameters:
-            ctx = QueryContext(query=query, parameters=parameters)
-            return ctx.final_query
-        return query
-
-    def __exit__(self, *args):
-        if config('CH_POOL', cast=bool, default=True):
-            CH_pool.release_connection(self.__client)
-        else:
-            self.__client.close()
-
-
-async def init():
-    logger.info(f">use CH_POOL:{config('CH_POOL', default=True)}")
-    if config('CH_POOL', cast=bool, default=True):
-        make_pool()
-
-
-async def terminate():
-    global CH_pool
-    if CH_pool is not None:
-        try:
-            CH_pool.close_all()
-            logger.info("Closed all connexions to CH")
-        except Exception as error:
-            logger.error("Error while closing all connexions to CH", exc_info=error)
@@ -4,41 +4,37 @@ import schemas
 
 
 def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
-    if isinstance(op, Enum):
-        op = op.value
     return {
-        schemas.SearchEventOperator.IS.value: "=",
-        schemas.SearchEventOperator.ON.value: "=",
-        schemas.SearchEventOperator.ON_ANY.value: "IN",
-        schemas.SearchEventOperator.IS_NOT.value: "!=",
-        schemas.SearchEventOperator.NOT_ON.value: "!=",
-        schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
-        schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
-        schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
-        schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
+        schemas.SearchEventOperator.IS: "=",
+        schemas.SearchEventOperator.ON: "=",
+        schemas.SearchEventOperator.ON_ANY: "IN",
+        schemas.SearchEventOperator.IS_NOT: "!=",
+        schemas.SearchEventOperator.NOT_ON: "!=",
+        schemas.SearchEventOperator.CONTAINS: "ILIKE",
+        schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
+        schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
+        schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
         # Selector operators:
-        schemas.ClickEventExtraOperator.IS.value: "=",
-        schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
-        schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
-        schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
-        schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
-        schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
+        schemas.ClickEventExtraOperator.IS: "=",
+        schemas.ClickEventExtraOperator.IS_NOT: "!=",
+        schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
+        schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
+        schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
+        schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
 
-        schemas.MathOperator.GREATER.value: ">",
-        schemas.MathOperator.GREATER_EQ.value: ">=",
-        schemas.MathOperator.LESS.value: "<",
-        schemas.MathOperator.LESS_EQ.value: "<=",
+        schemas.MathOperator.GREATER: ">",
+        schemas.MathOperator.GREATER_EQ: ">=",
+        schemas.MathOperator.LESS: "<",
+        schemas.MathOperator.LESS_EQ: "<=",
     }.get(op, "=")
 
 
 def is_negation_operator(op: schemas.SearchEventOperator):
-    if isinstance(op, Enum):
-        op = op.value
-    return op in [schemas.SearchEventOperator.IS_NOT.value,
-                  schemas.SearchEventOperator.NOT_ON.value,
-                  schemas.SearchEventOperator.NOT_CONTAINS.value,
-                  schemas.ClickEventExtraOperator.IS_NOT.value,
-                  schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
+    return op in [schemas.SearchEventOperator.IS_NOT,
+                  schemas.SearchEventOperator.NOT_ON,
+                  schemas.SearchEventOperator.NOT_CONTAINS,
+                  schemas.ClickEventExtraOperator.IS_NOT,
+                  schemas.ClickEventExtraOperator.NOT_CONTAINS]
 
 
 def reverse_sql_operator(op):
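Keying the mapping on the enum members directly (and dropping the isinstance unwrap) is safe as long as the schema enums derive from str: a str-based member hashes and compares equal to its raw string value, so lookups work whether the caller passes the member or the string. A stand-alone sketch; the member values below are illustrative, not necessarily the ones schemas defines.

from enum import Enum


class SearchEventOperator(str, Enum):
    IS = "is"                      # illustrative value
    NOT_CONTAINS = "notContains"   # illustrative value


SQL_OPS = {SearchEventOperator.IS: "=",
           SearchEventOperator.NOT_CONTAINS: "NOT ILIKE"}

# A str-based enum member and its raw value hit the same dictionary entry:
assert SQL_OPS.get(SearchEventOperator.IS, "=") == "="
assert SQL_OPS.get("is", "=") == "="
# Membership tests work the same way, which is what is_negation_operator relies on:
assert "notContains" in [SearchEventOperator.NOT_CONTAINS]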
@@ -74,4 +74,5 @@ EXP_CH_DRIVER=true
 EXP_AUTOCOMPLETE=true
 EXP_ALERTS=true
 EXP_ERRORS_SEARCH=true
 EXP_METRICS=true
+EXP_SESSIONS_SEARCH=true
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
+psycopg[pool,binary]==3.2.6
-clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
 
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
+psycopg[pool,binary]==3.2.6
-clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2
 
-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
api/routers/subs/product_analytics.py (new file, 28 additions)
@@ -0,0 +1,28 @@
+import schemas
+from chalicelib.core.product_analytics import events, properties
+from fastapi import Depends
+from or_dependencies import OR_context
+from routers.base import get_routers
+from fastapi import Body, Depends, BackgroundTasks
+
+public_app, app, app_apikey = get_routers()
+
+
+@app.get('/{projectId}/properties/search', tags=["product_analytics"])
+def get_event_properties(projectId: int, event_name: str = None,
+                         context: schemas.CurrentContext = Depends(OR_context)):
+    if not event_name or len(event_name) == 0:
+        return {"data": []}
+    return {"data": properties.get_properties(project_id=projectId, event_name=event_name)}
+
+
+@app.get('/{projectId}/events/names', tags=["product_analytics"])
+def get_all_events(projectId: int,
+                   context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.get_events(project_id=projectId)}
+
+
+@app.post('/{projectId}/events/search', tags=["product_analytics"])
+def search_events(projectId: int, data: schemas.EventsSearchPayloadSchema = Body(...),
+                  context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": events.search_events(project_id=projectId, data=data)}
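Hypothetical client-side calls against the three new routes follow. The host, project id, token and event names are placeholders; the /ap prefix comes from where the router is mounted (see the ee app.py hunk later in this diff), and the timestamp field names in the search payload are assumed from the _TimedSchema base, not confirmed by this diff.

import requests

BASE = "https://openreplay.example.com/api/ap/1"   # placeholder host and project id
HEADERS = {"Authorization": "Bearer <token>"}      # placeholder auth

# Property names available for one event type
requests.get(f"{BASE}/properties/search", params={"event_name": "CLICK"}, headers=HEADERS)

# All known event names for the project
requests.get(f"{BASE}/events/names", headers=HEADERS)

# Event search; events are OR-ed, filters are AND-ed (per the schema descriptions)
requests.post(f"{BASE}/events/search", headers=HEADERS,
              json={"startTimestamp": 1735689600000,   # assumed field name
                    "endTimestamp": 1738368000000,     # assumed field name
                    "events": [{"event_name": "CLICK"}],
                    "filters": []})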
@@ -1,15 +0,0 @@
-import schemas
-from chalicelib.core.metrics import product_anaytics2
-from fastapi import Depends
-from or_dependencies import OR_context
-from routers.base import get_routers
-
-
-public_app, app, app_apikey = get_routers()
-
-
-@app.post('/{projectId}/events/search', tags=["dashboard"])
-def search_events(projectId: int,
-                  # data: schemas.CreateDashboardSchema = Body(...),
-                  context: schemas.CurrentContext = Depends(OR_context)):
-    return product_anaytics2.search_events(project_id=projectId, data={})
@@ -1,2 +1,3 @@
 from .schemas import *
+from .product_analytics import *
 from . import overrides as _overrides
api/schemas/product_analytics.py (new file, 19 additions)
@@ -0,0 +1,19 @@
+from typing import Optional, List
+
+from pydantic import Field
+
+from .overrides import BaseModel
+from .schemas import EventPropertiesSchema, SortOrderType, _TimedSchema, \
+    _PaginatedSchema, PropertyFilterSchema
+
+
+class EventSearchSchema(BaseModel):
+    event_name: str = Field(...)
+    properties: Optional[EventPropertiesSchema] = Field(default=None)
+
+
+class EventsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
+    events: List[EventSearchSchema] = Field(default_factory=list, description="operator between events is OR")
+    filters: List[PropertyFilterSchema] = Field(default_factory=list, description="operator between filters is AND")
+    sort: str = Field(default="startTs")
+    order: SortOrderType = Field(default=SortOrderType.DESC)
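A minimal construction of the new payload, assuming the timed and paginated base fields carry defaults (this diff does not show _TimedSchema/_PaginatedSchema); event names are illustrative.

from schemas.product_analytics import EventSearchSchema, EventsSearchPayloadSchema

payload = EventsSearchPayloadSchema(
    events=[EventSearchSchema(event_name="CLICK"),   # events are OR-ed together
            EventSearchSchema(event_name="INPUT")],
    filters=[],                                      # filters are AND-ed together
)
print(payload.sort, payload.order)                   # startTs SortOrderType.DESC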
@@ -545,6 +545,70 @@ class RequestGraphqlFilterSchema(BaseModel):
         return values
 
 
+class EventPredefinedPropertyType(str, Enum):
+    TIME = "$time"
+    SOURCE = "$source"
+    DURATION_S = "$duration_s"
+    DESCRIPTION = "description"
+    AUTO_CAPTURED = "$auto_captured"
+    SDK_EDITION = "$sdk_edition"
+    SDK_VERSION = "$sdk_version"
+    DEVICE_ID = "$device_id"
+    OS = "$os"
+    OS_VERSION = "$os_version"
+    BROWSER = "$browser"
+    BROWSER_VERSION = "$browser_version"
+    DEVICE = "$device"
+    SCREEN_HEIGHT = "$screen_height"
+    SCREEN_WIDTH = "$screen_width"
+    CURRENT_URL = "$current_url"
+    INITIAL_REFERRER = "$initial_referrer"
+    REFERRING_DOMAIN = "$referring_domain"
+    REFERRER = "$referrer"
+    INITIAL_REFERRING_DOMAIN = "$initial_referring_domain"
+    SEARCH_ENGINE = "$search_engine"
+    SEARCH_ENGINE_KEYWORD = "$search_engine_keyword"
+    UTM_SOURCE = "utm_source"
+    UTM_MEDIUM = "utm_medium"
+    UTM_CAMPAIGN = "utm_campaign"
+    COUNTRY = "$country"
+    STATE = "$state"
+    CITY = "$city"
+    ISSUE_TYPE = "issue_type"
+    TAGS = "$tags"
+    IMPORT = "$import"
+
+
+class PropertyFilterSchema(BaseModel):
+    name: Union[EventPredefinedPropertyType, str] = Field(...)
+    operator: Union[SearchEventOperator, MathOperator] = Field(...)
+    value: List[Union[int, str]] = Field(...)
+    property_type: Optional[Literal["string", "number", "date"]] = Field(default=None)
+
+    @computed_field
+    @property
+    def is_predefined(self) -> bool:
+        return EventPredefinedPropertyType.has_value(self.name)
+
+    @model_validator(mode="after")
+    def transform_name(self):
+        if isinstance(self.name, Enum):
+            self.name = self.name.value
+        return self
+
+
+class EventPropertiesSchema(BaseModel):
+    operators: List[Literal["and", "or"]] = Field(...)
+    filters: List[PropertyFilterSchema] = Field(...)
+
+    @model_validator(mode="after")
+    def event_filter_validator(self):
+        assert len(self.filters) == 0 \
+               or len(self.operators) == len(self.filters) - 1, \
+            "Number of operators must match the number of filter-1"
+        return self
+
+
 class SessionSearchEventSchema2(BaseModel):
     is_event: Literal[True] = True
     value: List[Union[str, int]] = Field(...)
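The EventPropertiesSchema validator added above requires exactly one connective per gap between filters. A stand-alone sketch of the same invariant, with the filter entries simplified to plain dicts:

from typing import List, Literal

from pydantic import BaseModel, model_validator


class EventProperties(BaseModel):
    operators: List[Literal["and", "or"]]
    filters: List[dict]

    @model_validator(mode="after")
    def check(self):
        # Same rule as the diff: n filters need n-1 connectives (or no filters at all).
        assert len(self.filters) == 0 or len(self.operators) == len(self.filters) - 1, \
            "Number of operators must match the number of filters - 1"
        return self


EventProperties(operators=["or"], filters=[{"name": "$browser"}, {"name": "$os"}])   # valid
try:
    EventProperties(operators=[], filters=[{"name": "$browser"}, {"name": "$os"}])   # raises
except Exception as e:
    print(e)   # pydantic wraps the failed assertion in a ValidationError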
@@ -553,6 +617,7 @@ class SessionSearchEventSchema2(BaseModel):
     source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None)
     sourceOperator: Optional[MathOperator] = Field(default=None)
     filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default_factory=list)
+    properties: Optional[EventPropertiesSchema] = Field(default=None)
 
     _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
     _single_to_list_values = field_validator('value', mode='before')(single_to_list)
@@ -960,6 +1025,36 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
 
         return self
 
+    # We don't need this as the UI is expecting filters to override the full series' filters
+    # @model_validator(mode="after")
+    # def __merge_out_filters_with_series(self):
+    #     for f in self.filters:
+    #         for s in self.series:
+    #             found = False
+    #
+    #             if f.is_event:
+    #                 sub = s.filter.events
+    #             else:
+    #                 sub = s.filter.filters
+    #
+    #             for e in sub:
+    #                 if f.type == e.type and f.operator == e.operator:
+    #                     found = True
+    #                     if f.is_event:
+    #                         # If extra event: append value
+    #                         for v in f.value:
+    #                             if v not in e.value:
+    #                                 e.value.append(v)
+    #                     else:
+    #                         # If extra filter: override value
+    #                         e.value = f.value
+    #             if not found:
+    #                 sub.append(f)
+    #
+    #     self.filters = []
+    #
+    #     return self
+
     # UI is expecting filters to override the full series' filters
     @model_validator(mode="after")
     def __override_series_filters_with_outer_filters(self):
@@ -1030,16 +1125,6 @@ class CardTable(__CardSchema):
             values["metricValue"] = []
         return values
 
-    @model_validator(mode="after")
-    def __enforce_AND_operator(self):
-        self.metric_of = MetricOfTable(self.metric_of)
-        if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
-                              MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
-            for s in self.series:
-                if s.filter is not None:
-                    s.filter.events_order = SearchEventOrder.AND
-        return self
-
     @model_validator(mode="after")
     def __transform(self):
         self.metric_of = MetricOfTable(self.metric_of)
@@ -1115,7 +1200,7 @@ class CardPathAnalysis(__CardSchema):
     view_type: MetricOtherViewType = Field(...)
     metric_value: List[ProductAnalyticsSelectedEventType] = Field(default_factory=list)
     density: int = Field(default=4, ge=2, le=10)
-    rows: int = Field(default=5, ge=1, le=10)
+    rows: int = Field(default=3, ge=1, le=10)
 
     start_type: Literal["start", "end"] = Field(default="start")
     start_point: List[PathAnalysisSubFilterSchema] = Field(default_factory=list)
@@ -1509,3 +1594,30 @@ class TagCreate(TagUpdate):
 
 class ScopeSchema(BaseModel):
     scope: int = Field(default=1, ge=1, le=2)
+
+
+class SessionModel(BaseModel):
+    duration: int
+    errorsCount: int
+    eventsCount: int
+    favorite: bool = Field(default=False)
+    issueScore: int
+    issueTypes: List[IssueType] = Field(default=[])
+    metadata: dict = Field(default={})
+    pagesCount: int
+    platform: str
+    projectId: int
+    sessionId: str
+    startTs: int
+    timezone: Optional[str]
+    userAnonymousId: Optional[str]
+    userBrowser: str
+    userCity: str
+    userCountry: str
+    userDevice: Optional[str]
+    userDeviceType: str
+    userId: Optional[str]
+    userOs: str
+    userState: str
+    userUuid: str
+    viewed: bool = Field(default=False)
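A sketch of validating a raw session row with the new response model; every field value below is made up, and the import assumes SessionModel is re-exported by the schemas package as the other models are.

from schemas import SessionModel

row = {"duration": 53000, "errorsCount": 0, "eventsCount": 12, "issueScore": 0,
       "issueTypes": [], "metadata": {}, "pagesCount": 3, "platform": "web",
       "projectId": 1, "sessionId": "7351343271903118", "startTs": 1735689600000,
       "timezone": "UTC+02:00", "userAnonymousId": None, "userBrowser": "Chrome",
       "userCity": "Berlin", "userCountry": "DE", "userDevice": None,
       "userDeviceType": "desktop", "userId": "demo@openreplay.com",
       "userOs": "Mac OS X", "userState": "Berlin", "userUuid": "0f9bd4ee-0000",
       "viewed": False}

session = SessionModel(**row)
print(session.favorite)   # False: defaults cover what the row omits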
@@ -19,16 +19,14 @@ const EVENTS_DEFINITION = {
     }
 };
 EVENTS_DEFINITION.emit = {
     NEW_AGENT: "NEW_AGENT",
     NO_AGENTS: "NO_AGENT",
     AGENT_DISCONNECT: "AGENT_DISCONNECTED",
     AGENTS_CONNECTED: "AGENTS_CONNECTED",
-    AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
     NO_SESSIONS: "SESSION_DISCONNECTED",
     SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
     SESSION_RECONNECTED: "SESSION_RECONNECTED",
-    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
-    WEBRTC_CONFIG: "WEBRTC_CONFIG",
+    UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
 };
 
 const BASE_sessionInfo = {
@@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
 };
 
 async function getRoomData(io, roomID) {
-    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
+    let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
     const connected_sockets = await io.in(roomID).fetchSockets();
     if (connected_sockets.length > 0) {
         for (let socket of connected_sockets) {
@@ -52,19 +52,13 @@ async function getRoomData(io, roomID) {
             } else {
                 agentsCount++;
                 agentIDs.push(socket.id);
-                agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
-                if (socket.handshake.query.config !== undefined) {
-                    config = socket.handshake.query.config;
-                }
             }
         }
     } else {
         tabsCount = -1;
         agentsCount = -1;
-        agentInfos = [];
-        agentIDs = [];
     }
-    return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
+    return {tabsCount, agentsCount, tabIDs, agentIDs};
 }
 
 function processNewSocket(socket) {
@@ -84,7 +78,7 @@ async function onConnect(socket) {
     IncreaseOnlineConnections(socket.handshake.query.identity);
 
     const io = getServer();
-    const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);
+    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
 
     if (socket.handshake.query.identity === IDENTITIES.session) {
         // Check if session with the same tabID already connected, if so, refuse new connexion
@@ -106,9 +100,7 @@ async function onConnect(socket) {
             // Inform all connected agents about reconnected session
             if (agentsCount > 0) {
                 logger.debug(`notifying new session about agent-existence`);
-                io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
                 io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
-                io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
                 socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
             }
         } else if (tabsCount <= 0) {
@@ -126,8 +118,7 @@ async function onConnect(socket) {
             // Stats
             startAssist(socket, socket.handshake.query.agentID);
         }
-        io.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, socket.handshake.query.config);
-        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, { ...socket.handshake.query.agentInfo });
+        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
     }
 
     // Set disconnect handler
@@ -2,12 +2,11 @@ package datasaver
 
 import (
 	"context"
-	"encoding/json"
-	"openreplay/backend/pkg/db/types"
 
 	"openreplay/backend/internal/config/db"
 	"openreplay/backend/pkg/db/clickhouse"
 	"openreplay/backend/pkg/db/postgres"
+	"openreplay/backend/pkg/db/types"
 	"openreplay/backend/pkg/logger"
 	. "openreplay/backend/pkg/messages"
 	queue "openreplay/backend/pkg/queue/types"
@@ -51,6 +50,10 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
 }
 
 func (s *saverImpl) Handle(msg Message) {
+	if msg.TypeID() == MsgCustomEvent {
+		defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
+	}
+
 	var (
 		sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
 		session *sessions.Session
@@ -66,23 +69,6 @@ func (s *saverImpl) Handle(msg Message) {
 		return
 	}
 
-	if msg.TypeID() == MsgCustomEvent {
-		m := msg.(*CustomEvent)
-		// Try to parse custom event payload to JSON and extract or_payload field
-		type CustomEventPayload struct {
-			CustomTimestamp uint64 `json:"or_timestamp"`
-		}
-		customPayload := &CustomEventPayload{}
-		if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
-			if customPayload.CustomTimestamp >= session.Timestamp {
-				s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
-				msg.Meta().Timestamp = customPayload.CustomTimestamp
-				s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
-			}
-		}
-		defer s.Handle(types.WrapCustomEvent(m))
-	}
-
 	if IsMobileType(msg.TypeID()) {
 		if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
 			if !postgres.IsPkeyViolation(err) {
@@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
 
 	// Add tracker version to context
 	r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
-	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
-		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
-		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
-		return
-	}
 
 	// Handler's logic
 	if req.ProjectKey == nil {
@@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
 	// Add projectID to context
 	r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))
 
+	// Validate tracker version
+	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
+		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
+		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
+		return
+	}
+
 	// Check if the project supports mobile sessions
 	if !p.IsWeb() {
 		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)
ee/api/.gitignore (vendored, 6 changes)
@@ -223,10 +223,14 @@ Pipfile.lock
 /chalicelib/core/sessions/performance_event.py
 /chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
 /chalicelib/core/sessions/unprocessed_sessions.py
+/chalicelib/core/sessions/__init__.py
+/chalicelib/core/sessions/sessions_legacy_mobil.py
+/chalicelib/core/sessions/sessions_search_exp.py
 /chalicelib/core/metrics/modules
 /chalicelib/core/socket_ios.py
 /chalicelib/core/sourcemaps
 /chalicelib/core/tags.py
+/chalicelib/core/product_analytics
 /chalicelib/saml
 /chalicelib/utils/__init__.py
 /chalicelib/utils/args_transformer.py
@@ -289,3 +293,5 @@ Pipfile.lock
 /chalicelib/core/errors/errors_ch.py
 /chalicelib/core/errors/errors_details.py
 /chalicelib/utils/contextual_validators.py
+/routers/subs/product_analytics.py
+/schemas/product_analytics.py
@@ -6,23 +6,20 @@ name = "pypi"
 [packages]
 urllib3 = "==2.3.0"
 requests = "==2.32.3"
-boto3 = "==1.36.12"
+boto3 = "==1.37.16"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
+psycopg = {extras = ["binary", "pool"], version = "==3.2.6"}
 clickhouse-connect = "==0.8.15"
-elasticsearch = "==8.17.1"
+elasticsearch = "==8.17.2"
 jira = "==3.8.0"
-cachetools = "==5.5.1"
+cachetools = "==5.5.2"
-fastapi = "==0.115.8"
+fastapi = "==0.115.11"
 uvicorn = {extras = ["standard"], version = "==0.34.0"}
 gunicorn = "==23.0.0"
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.6"}
 apscheduler = "==3.11.0"
-python3-saml = "==1.16.0"
-python-multipart = "==0.0.20"
 redis = "==5.2.1"
 azure-storage-blob = "==12.24.1"
@@ -21,7 +21,7 @@ from chalicelib.utils import pg_client, ch_client
 from crons import core_crons, ee_crons, core_dynamic_crons
 from routers import core, core_dynamic
 from routers import ee
-from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_anaytics
+from routers.subs import insights, metrics, v1_api, health, usability_tests, spot, product_analytics
 from routers.subs import v1_api_ee
 
 if config("ENABLE_SSO", cast=bool, default=True):
@@ -150,9 +150,9 @@ app.include_router(spot.public_app)
 app.include_router(spot.app)
 app.include_router(spot.app_apikey)
 
-app.include_router(product_anaytics.public_app)
-app.include_router(product_anaytics.app)
-app.include_router(product_anaytics.app_apikey)
+app.include_router(product_analytics.public_app, prefix="/ap")
+app.include_router(product_analytics.app, prefix="/ap")
+app.include_router(product_analytics.app_apikey, prefix="/ap")
 
 if config("ENABLE_SSO", cast=bool, default=True):
     app.include_router(saml.public_app)
@@ -86,8 +86,7 @@ def __generic_query(typename, value_length=None):
                    ORDER BY value"""

     if value_length is None or value_length > 2:
-        return f"""SELECT DISTINCT ON(value, type) value, type
-                   FROM ((SELECT DISTINCT value, type
+        return f"""(SELECT DISTINCT value, type
                    FROM {TABLE}
                    WHERE
                        project_id = %(project_id)s
@@ -103,7 +102,7 @@ def __generic_query(typename, value_length=None):
                        AND type='{typename.upper()}'
                        AND value ILIKE %(value)s
                    ORDER BY value
-                   LIMIT 5)) AS raw;"""
+                   LIMIT 5);"""
     return f"""SELECT DISTINCT value, type
                FROM {TABLE}
                WHERE
@@ -258,7 +257,7 @@ def __search_metadata(project_id, value, key=None, source=None):
                            WHERE project_id = %(project_id)s
                              AND {colname} ILIKE %(svalue)s LIMIT 5)""")
     with ch_client.ClickHouseClient() as cur:
-        query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
+        query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE
                                      FROM({" UNION ALL ".join(sub_from)}) AS all_metas
                                      LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
                                                               "svalue": helper.string_to_sql_like("^" + value)})
@@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data):
     MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)

     ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False)
-    ch_basic_query.append("error_id = %(error_id)s")
+    ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s")

     with ch_client.ClickHouseClient() as ch:
         data["startDate24"] = TimeUTC.now(-1)
@@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data):
                   "error_id": error_id}

         main_ch_query = f"""\
-                WITH pre_processed AS (SELECT error_id,
+                WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id,
                                               toString(`$properties`.name) AS name,
                                               toString(`$properties`.message) AS message,
                                               session_id,
@@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data):
                          AND `$event_name` = 'ERROR'
                          AND events.created_at >= toDateTime(timestamp / 1000)
                          AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000)
-                         AND error_id = %(error_id)s
+                         AND toString(`$properties`.error_id) = %(error_id)s
                        GROUP BY timestamp
                        ORDER BY timestamp) AS chart_details
                 ) AS chart_details24 ON TRUE
@@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data):
                          AND `$event_name` = 'ERROR'
                          AND events.created_at >= toDateTime(timestamp / 1000)
                          AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000)
-                         AND error_id = %(error_id)s
+                         AND toString(`$properties`.error_id) = %(error_id)s
                        GROUP BY timestamp
                        ORDER BY timestamp) AS chart_details
                 ) AS chart_details30 ON TRUE;"""
@@ -1,17 +0,0 @@
-import logging
-
-from decouple import config
-
-logger = logging.getLogger(__name__)
-from . import sessions_pg
-from . import sessions_pg as sessions_legacy
-from . import sessions_ch
-from . import sessions_search as sessions_search_legacy
-
-if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
-    logger.info(">>> Using experimental sessions search")
-    from . import sessions_ch as sessions
-    from . import sessions_search_exp as sessions_search
-else:
-    from . import sessions_pg as sessions
-    from . import sessions_search as sessions_search
@@ -927,12 +927,12 @@ def authenticate_sso(email: str, internal_id: str):
                                               aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti),
         "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
         "spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
-                                            iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE, for_spot=True),
+                                            iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE),
         "spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
                                                              tenant_id=r['tenantId'],
                                                              iat=j_r.spot_jwt_refresh_iat,
                                                              aud=spot.AUDIENCE,
-                                                             jwt_jti=j_r.spot_jwt_refresh_jti, for_spot=True),
+                                                             jwt_jti=j_r.spot_jwt_refresh_jti),
         "spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int)
     }
     return response
@@ -44,11 +44,15 @@ rm -rf ./chalicelib/core/sessions/sessions_search.py
 rm -rf ./chalicelib/core/sessions/performance_event.py
 rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
 rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
+rm -rf ./chalicelib/core/sessions/__init__.py
+rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
+rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
 rm -rf ./chalicelib/core/metrics/modules
 rm -rf ./chalicelib/core/socket_ios.py
 rm -rf ./chalicelib/core/sourcemaps
 rm -rf ./chalicelib/core/user_testing.py
 rm -rf ./chalicelib/core/tags.py
+rm -rf ./chalicelib/core/product_analytics
 rm -rf ./chalicelib/saml
 rm -rf ./chalicelib/utils/__init__.py
 rm -rf ./chalicelib/utils/args_transformer.py
@@ -109,3 +113,5 @@ rm -rf ./chalicelib/core/errors/errors_pg.py
 rm -rf ./chalicelib/core/errors/errors_ch.py
 rm -rf ./chalicelib/core/errors/errors_details.py
 rm -rf ./chalicelib/utils/contextual_validators.py
+rm -rf ./routers/subs/product_analytics.py
+rm -rf ./schemas/product_analytics.py
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
+psycopg[pool,binary]==3.2.6
-clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 python-decouple==3.8
 pydantic[email]==2.10.6
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
+psycopg[pool,binary]==3.2.6
-clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
+fastapi==0.115.11
 python-decouple==3.8
 pydantic[email]==2.10.6
 apscheduler==3.11.0
@@ -1,16 +1,15 @@
 urllib3==2.3.0
 requests==2.32.3
-boto3==1.36.12
+boto3==1.37.16
 pyjwt==2.10.1
 psycopg2-binary==2.9.10
-psycopg[pool,binary]==3.2.4
+psycopg[pool,binary]==3.2.6
-clickhouse-driver[lz4]==0.2.9
 clickhouse-connect==0.8.15
-elasticsearch==8.17.1
+elasticsearch==8.17.2
 jira==3.8.0
-cachetools==5.5.1
+cachetools==5.5.2

-fastapi==0.115.8
+fastapi==0.115.11
 uvicorn[standard]==0.34.0
 gunicorn==23.0.0
 python-decouple==3.8
@@ -1,4 +1,5 @@
 from .schemas import *
 from .schemas_ee import *
 from .assist_stats_schema import *
+from .product_analytics import *
 from . import overrides as _overrides
@@ -4,7 +4,7 @@ from pydantic import Field, EmailStr, field_validator, model_validator

 from chalicelib.utils.TimeUTC import TimeUTC
 from . import schemas
-from .overrides import BaseModel, Enum, ORUnion
+from .overrides import BaseModel, Enum
 from .transformers_validators import remove_whitespace

@@ -91,33 +91,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
         return values


-class SessionModel(BaseModel):
-    duration: int
-    errorsCount: int
-    eventsCount: int
-    favorite: bool = Field(default=False)
-    issueScore: int
-    issueTypes: List[schemas.IssueType] = Field(default=[])
-    metadata: dict = Field(default={})
-    pagesCount: int
-    platform: str
-    projectId: int
-    sessionId: str
-    startTs: int
-    timezone: Optional[str]
-    userAnonymousId: Optional[str]
-    userBrowser: str
-    userCity: str
-    userCountry: str
-    userDevice: Optional[str]
-    userDeviceType: str
-    userId: Optional[str]
-    userOs: str
-    userState: str
-    userUuid: str
-    viewed: bool = Field(default=False)
-
-
 class AssistRecordUpdatePayloadSchema(BaseModel):
     name: str = Field(..., min_length=1)
     _transform_name = field_validator('name', mode="before")(remove_whitespace)
@@ -121,7 +121,16 @@ func (s *storageImpl) Get(sessionID uint64) (*Session, error) {

 // For the ender service only
 func (s *storageImpl) GetMany(sessionIDs []uint64) ([]*Session, error) {
-	rows, err := s.db.Query("SELECT session_id, COALESCE( duration, 0 ), start_ts FROM sessions WHERE session_id = ANY($1)", pq.Array(sessionIDs))
+	rows, err := s.db.Query(`
+		SELECT
+			session_id,
+			CASE
+				WHEN duration IS NULL OR duration < 0 THEN 0
+				ELSE duration
+			END,
+			start_ts
+		FROM sessions
+		WHERE session_id = ANY($1)`, pq.Array(sessionIDs))
 	if err != nil {
 		return nil, err
 	}
@@ -1,16 +1,3 @@
-SELECT 1
-FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
-      FROM experimental.sessions) AS raw
-LIMIT 1;
-
-SELECT 1
-FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
-      FROM experimental.sessions) AS raw
-LIMIT 1;
-
-ALTER TABLE experimental.sessions
-    MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
-
 CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';

 SET allow_experimental_json_type = 1;
@@ -164,7 +151,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
       ORDER BY (project_id, "$event_name", created_at, session_id)
-      TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
+      TTL _timestamp + INTERVAL 1 MONTH,
+          _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';

 -- The list of events that should not be ingested,
 -- according to a specific event_name and optional properties
13  ee/scripts/schema/db/init_dbs/clickhouse/1.23.0/1.23.0.sql  Normal file
@@ -0,0 +1,13 @@
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
+
+
+-- The full list of event-properties (used to tell which property belongs to which event)
+CREATE TABLE IF NOT EXISTS product_analytics.event_properties
+(
+    project_id    UInt16,
+    event_name    String,
+    property_name String,
+
+    _timestamp    DateTime DEFAULT now()
+) ENGINE = ReplacingMergeTree(_timestamp)
+      ORDER BY (project_id, event_name, property_name);
@@ -1,4 +1,4 @@
-CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.23.0-ee';
 CREATE DATABASE IF NOT EXISTS experimental;

 CREATE TABLE IF NOT EXISTS experimental.autocomplete
@@ -9,7 +9,8 @@ CREATE TABLE IF NOT EXISTS experimental.autocomplete
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMM(_timestamp)
-      ORDER BY (project_id, type, value);
+      ORDER BY (project_id, type, value)
+      TTL _timestamp + INTERVAL 1 MONTH;

 CREATE TABLE IF NOT EXISTS experimental.events
 (
@@ -86,7 +87,8 @@ CREATE TABLE IF NOT EXISTS experimental.events
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(datetime)
-     ORDER BY (project_id, datetime, event_type, session_id, message_id);
+     ORDER BY (project_id, datetime, event_type, session_id, message_id)
+     TTL datetime + INTERVAL 3 MONTH;


@@ -106,7 +108,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
     user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
     user_city LowCardinality(String),
     user_state LowCardinality(String),
-    platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
+    platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
     datetime DateTime,
     timezone LowCardinality(Nullable(String)),
     duration UInt32,
@@ -138,6 +140,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMMDD(datetime)
       ORDER BY (project_id, datetime, session_id)
+      TTL datetime + INTERVAL 3 MONTH
       SETTINGS index_granularity = 512;

 CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
@@ -149,7 +152,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
     sign Int8
 ) ENGINE = CollapsingMergeTree(sign)
       PARTITION BY toYYYYMM(_timestamp)
-      ORDER BY (project_id, user_id, session_id);
+      ORDER BY (project_id, user_id, session_id)
+      TTL _timestamp + INTERVAL 3 MONTH;

 CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
 (
@@ -159,7 +163,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
       PARTITION BY toYYYYMM(_timestamp)
-      ORDER BY (project_id, user_id, session_id);
+      ORDER BY (project_id, user_id, session_id)
+      TTL _timestamp + INTERVAL 3 MONTH;

 CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
 (
@@ -169,7 +174,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
-     ORDER BY (project_id, user_id, error_id);
+     ORDER BY (project_id, user_id, error_id)
+     TTL _timestamp + INTERVAL 3 MONTH;

 CREATE TABLE IF NOT EXISTS experimental.issues
 (
@@ -182,7 +188,8 @@ CREATE TABLE IF NOT EXISTS experimental.issues
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
-     ORDER BY (project_id, issue_id, type);
+     ORDER BY (project_id, issue_id, type)
+     TTL _timestamp + INTERVAL 3 MONTH;
@@ -285,7 +292,8 @@ CREATE TABLE IF NOT EXISTS experimental.sessions_feature_flags
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(datetime)
-     ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id);
+     ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id)
+     TTL datetime + INTERVAL 3 MONTH;

 CREATE TABLE IF NOT EXISTS experimental.ios_events
 (
@@ -321,7 +329,8 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(datetime)
-     ORDER BY (project_id, datetime, event_type, session_id, message_id);
+     ORDER BY (project_id, datetime, event_type, session_id, message_id)
+     TTL datetime + INTERVAL 3 MONTH;

 SET allow_experimental_json_type = 1;
@@ -475,7 +484,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
      ORDER BY (project_id, "$event_name", created_at, session_id)
-     TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
+     TTL _timestamp + INTERVAL 1 MONTH,
+         _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';

 -- The list of events that should not be ingested,
 -- according to a specific event_name and optional properties
@@ -644,6 +654,17 @@ CREATE TABLE IF NOT EXISTS product_analytics.all_events
       ORDER BY (project_id, event_name);


+-- The full list of event-properties (used to tell which property belongs to which event)
+CREATE TABLE IF NOT EXISTS product_analytics.event_properties
+(
+    project_id    UInt16,
+    event_name    String,
+    property_name String,
+
+    _timestamp    DateTime DEFAULT now()
+) ENGINE = ReplacingMergeTree(_timestamp)
+      ORDER BY (project_id, event_name, property_name);
+
 -- The full list of properties (events and users)
 CREATE TABLE IF NOT EXISTS product_analytics.all_properties
 (
30  ee/scripts/schema/db/init_dbs/postgresql/1.23.0/1.23.0.sql  Normal file
@@ -0,0 +1,30 @@
+\set previous_version 'v1.22.0-ee'
+\set next_version 'v1.23.0-ee'
+SELECT openreplay_version()                       AS current_version,
+       openreplay_version() = :'previous_version' AS valid_previous,
+       openreplay_version() = :'next_version'     AS is_next
+\gset
+
+\if :valid_previous
+\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
+BEGIN;
+SELECT format($fn_def$
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT '%1$s'
+$$ LANGUAGE sql IMMUTABLE;
+$fn_def$, :'next_version')
+\gexec
+
+--
+
+
+
+COMMIT;
+
+\elif :is_next
+\echo new version detected :'next_version', nothing to do
+\else
+\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
+\endif
@@ -1,4 +1,4 @@
-\set or_version 'v1.22.0-ee'
+\set or_version 'v1.23.0-ee'
 SET client_min_messages TO NOTICE;
 \set ON_ERROR_STOP true
 SELECT EXISTS (SELECT 1
@@ -0,0 +1,3 @@
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
+
+DROP TABLE IF EXISTS product_analytics.event_properties;
@@ -0,0 +1,27 @@
+\set previous_version 'v1.23.0-ee'
+\set next_version 'v1.22.0-ee'
+SELECT openreplay_version()                       AS current_version,
+       openreplay_version() = :'previous_version' AS valid_previous,
+       openreplay_version() = :'next_version'     AS is_next
+\gset
+
+\if :valid_previous
+\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
+BEGIN;
+SELECT format($fn_def$
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT '%1$s'
+$$ LANGUAGE sql IMMUTABLE;
+$fn_def$, :'next_version')
+\gexec
+
+
+COMMIT;
+
+\elif :is_next
+\echo new version detected :'next_version', nothing to do
+\else
+\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
+\endif
@@ -1,4 +1,5 @@
 import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
+import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
 import React, { Suspense, lazy } from 'react';
 import { Redirect, Route, Switch } from 'react-router-dom';
 import { observer } from 'mobx-react-lite';
@@ -9,7 +10,7 @@ import { Loader } from 'UI';

 import APIClient from './api_client';
 import * as routes from './routes';
-import { debounceCall } from '@/utils';
+import { debounce } from '@/utils';

 const components: any = {
   SessionPure: lazy(() => import('Components/Session/Session')),
@@ -87,6 +88,7 @@ const ASSIST_PATH = routes.assist();
 const LIVE_SESSION_PATH = routes.liveSession();
 const MULTIVIEW_PATH = routes.multiview();
 const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
+const ASSIST_STATS_PATH = routes.assistStats();

 const USABILITY_TESTING_PATH = routes.usabilityTesting();
 const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit();
@@ -97,6 +99,7 @@ const SPOT_PATH = routes.spot();
 const SCOPE_SETUP = routes.scopeSetup();

 const HIGHLIGHTS_PATH = routes.highlights();
+let debounceSearch: any = () => {};

 function PrivateRoutes() {
   const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
@@ -121,10 +124,14 @@ function PrivateRoutes() {
     }
   }, [siteId]);

+  React.useEffect(() => {
+    debounceSearch = debounce(() => searchStore.fetchSessions(), 500);
+  }, []);
+
   React.useEffect(() => {
     if (!searchStore.urlParsed) return;
-    debounceCall(() => searchStore.fetchSessions(true), 250)()
+    debounceSearch();
-  }, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
+  }, [searchStore.instance.filters, searchStore.instance.eventsOrder]);

   return (
     <Suspense fallback={<Loader loading className="flex-1" />}>
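The hunk above builds the debounced search callback once on mount, then reuses it on every filter or event-order change instead of constructing a fresh `debounceCall` wrapper per render. A minimal sketch of that pattern, assuming a `debounce` helper with the usual trailing-edge timer-reset semantics (the real helper lives in `@/utils` and is not shown in this diff):

```ts
// Hypothetical stand-in for the `debounce` imported from '@/utils':
// postpones `fn` until `wait` ms have elapsed with no further calls.
function debounce<T extends (...args: any[]) => void>(fn: T, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: Parameters<T>): void => {
    if (timer !== undefined) clearTimeout(timer); // reset on every new call
    timer = setTimeout(() => fn(...args), wait);
  };
}

// Usage mirroring the new effect: create once, call on each change.
const fetchSessions = () => console.log('fetching sessions');
const debounceSearch = debounce(fetchSessions, 500);
debounceSearch();
debounceSearch(); // a burst of calls collapses into one fetch, 500 ms later
```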
@@ -1,7 +1,7 @@
 import React, { useState, useEffect } from 'react';
 import cn from 'classnames';
 import Counter from 'App/components/shared/SessionItem/Counter';
-import { useDraggable } from '@neodrag/react';
+import Draggable from 'react-draggable';
 import type { LocalStream } from 'Player';
 import { PlayerContext } from 'App/components/Session/playerContext';
 import ChatControls from '../ChatControls/ChatControls';
@@ -25,8 +25,6 @@ function ChatWindow({
   isPrestart,
 }: Props) {
   const { t } = useTranslation();
-  const dragRef = React.useRef<HTMLDivElement>(null);
-  useDraggable(dragRef, { bounds: 'body', defaultPosition: { x: 50, y: 200 } })
   const { player } = React.useContext(PlayerContext);

   const { toggleVideoLocalStream } = player.assistManager;
@@ -41,7 +39,11 @@ function ChatWindow({
   }, [localVideoEnabled]);

   return (
-    <div ref={dragRef}>
+    <Draggable
+      handle=".handle"
+      bounds="body"
+      defaultPosition={{ x: 50, y: 200 }}
+    >
       <div
         className={cn(stl.wrapper, 'fixed radius bg-white shadow-xl mt-16')}
         style={{ width: '280px' }}
@@ -100,7 +102,7 @@ function ChatWindow({
         isPrestart={isPrestart}
       />
     </div>
-    </div>
+    </Draggable>
   );
 }
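The chat window swaps the `useDraggable` hook from `@neodrag/react` for the `react-draggable` wrapper component, which confines movement to `body` and only starts a drag from the element matching the `handle` selector. A minimal sketch of that wrapper pattern (prop names as in the hunk above; the inner markup is illustrative):

```tsx
import React from 'react';
import Draggable from 'react-draggable';

// Floating panel draggable only by its ".handle" strip, kept inside <body>.
function FloatingPanel({ children }: { children: React.ReactNode }) {
  return (
    <Draggable handle=".handle" bounds="body" defaultPosition={{ x: 50, y: 200 }}>
      <div style={{ position: 'fixed', width: 280 }}>
        {/* only this strip initiates dragging */}
        <div className="handle" style={{ cursor: 'move' }}>Drag here</div>
        {children}
      </div>
    </Draggable>
  );
}

export default FloatingPanel;
```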
@@ -16,10 +16,10 @@ function ProfilerDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;

-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerProfiler from '@openreplay/tracker-profiler';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
 const fn = profiler('call_name')(() => {
   //...
 }, thisArg); // thisArg is optional`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
+
 import trackerProfiler from '@openreplay/tracker-profiler/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
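This and the following plugin setup docs all migrate from constructing a tracker instance per snippet (`new OpenReplay({...})`) to configuring the shared `tracker` singleton that the package exports. A minimal sketch of the new style, assembled only from the calls shown in these hunks (`configure`, `start`, `use`), with a placeholder project key:

```ts
// New style used throughout the updated snippets: the package exports a
// ready-made singleton that is configured once and shared by all plugins.
import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';

tracker.configure({
  projectKey: 'YOUR_PROJECT_KEY', // placeholder, not a real key
});
tracker.start();

// Plugins attach exactly as before; `use` returns the plugin's API.
export const profiler = tracker.use(trackerProfiler());
```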
@@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';

 function AssistNpm(props) {
   const { t } = useTranslation();
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerAssist from '@openreplay/tracker-assist';
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${props.projectKey}',
 });
 tracker.start()

 tracker.use(trackerAssist(options)); // check the list of available options below`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerAssist from '@openreplay/tracker-assist/cjs';
-const tracker = new OpenReplay({
+
+tracker.configure({
   projectKey: '${props.projectKey}'
 });
 const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below
@@ -14,19 +14,20 @@ function GraphQLDoc() {
   const projectKey = siteId
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
-  const usage = `import OpenReplay from '@openreplay/tracker';
+  const usage = `import { tracker } from '@openreplay/tracker';
 import trackerGraphQL from '@openreplay/tracker-graphql';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
 //...
 export const recordGraphQL = tracker.use(trackerGraphQL());`;
-  const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@@ -15,20 +15,21 @@ function MobxDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;

-  const mobxUsage = `import OpenReplay from '@openreplay/tracker';
+  const mobxUsage = `import { tracker } from '@openreplay/tracker';
 import trackerMobX from '@openreplay/tracker-mobx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below
 tracker.start();
 `;

-  const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
+  const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerMobX from '@openreplay/tracker-mobx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.use(trackerMobX(<options>)); // check list of available options below
@@ -16,10 +16,10 @@ function NgRxDoc() {
   : sites[0]?.projectKey;
   const usage = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerNgRx from '@openreplay/tracker-ngrx';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
 export class AppModule {}`;
   const usageCjs = `import { StoreModule } from '@ngrx/store';
 import { reducers } from './reducers';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@@ -17,10 +17,10 @@ function PiniaDoc() {
     ? sites.find((site) => site.id === siteId)?.projectKey
     : sites[0]?.projectKey;
   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@@ -16,10 +16,10 @@ function ReduxDoc() {
   : sites[0]?.projectKey;

   const usage = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerRedux from '@openreplay/tracker-redux';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@@ -29,10 +29,11 @@ const store = createStore(
   applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
 );`;
   const usageCjs = `import { applyMiddleware, createStore } from 'redux';
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerRedux from '@openreplay/tracker-redux/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@@ -16,10 +16,10 @@ function VueDoc() {
   : sites[0]?.projectKey;

   const usage = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerVuex from '@openreplay/tracker-vuex';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 tracker.start()
@@ -29,10 +29,11 @@ const store = new Vuex.Store({
   plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
 });`;
   const usageCjs = `import Vuex from 'vuex'
-import OpenReplay from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerVuex from '@openreplay/tracker-vuex/cjs';
 //...
-const tracker = new OpenReplay({
+tracker.configure({
   projectKey: '${projectKey}'
 });
 //...
@@ -16,11 +16,10 @@ function ZustandDoc(props) {
   : sites[0]?.projectKey;

   const usage = `import create from "zustand";
-import Tracker from '@openreplay/tracker';
+import { tracker } from '@openreplay/tracker';
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';

-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });
@@ -43,11 +42,12 @@ const useBearStore = create(
 )
 `;
   const usageCjs = `import create from "zustand";
-import Tracker from '@openreplay/tracker/cjs';
+import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
 import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';


-const tracker = new Tracker({
+tracker.configure({
   projectKey: ${projectKey},
 });
@@ -24,7 +24,7 @@ function ModuleCard(props: Props) {
       <Switch
         size="small"
         checked={!module.isEnabled}
-        title={!module.isEnabled ? 'Enabled' : 'Disabled'}
+        title={module.isEnabled ? 'Enabled' : 'Disabled'}
         onChange={() => props.onToggle(module)}
       />
     </div>
@@ -40,12 +40,11 @@ function Modules() {
   };

   useEffect(() => {
-    const moduleList = list(t)
-    moduleList.forEach((module) => {
+    list(t).forEach((module) => {
       module.isEnabled = modules.includes(module.key);
     });
     setModulesState(
-      moduleList.filter(
+      list(t).filter(
         (module) => !module.hidden && (!module.enterprise || isEnterprise),
       ),
     );
@ -3,6 +3,7 @@ import withPageTitle from 'HOCs/withPageTitle';
|
||||||
import { PageTitle } from 'UI';
|
import { PageTitle } from 'UI';
|
||||||
import { observer } from 'mobx-react-lite';
|
import { observer } from 'mobx-react-lite';
|
||||||
import { useStore } from 'App/mstore';
|
import { useStore } from 'App/mstore';
|
||||||
|
import LanguageSwitcher from "App/components/LanguageSwitcher";
|
||||||
import Settings from './Settings';
|
import Settings from './Settings';
|
||||||
import ChangePassword from './ChangePassword';
|
import ChangePassword from './ChangePassword';
|
||||||
import styles from './profileSettings.module.css';
|
import styles from './profileSettings.module.css';
|
||||||
|
|
@@ -20,107 +21,90 @@ function ProfileSettings() {
   return (
     <div className="bg-white rounded-lg border shadow-sm p-5">
       <PageTitle title={<div>{t('Account')}</div>} />
-      <div className="flex items-center">
-        <div className={styles.left}>
-          <h4 className="text-lg mb-4">{t('Profile')}</h4>
-          <div className={styles.info}>
-            {t(
-              'Your email address is your identity on OpenReplay and is used to login.',
-            )}
-          </div>
-        </div>
-        <div>
-          <Settings />
-        </div>
-      </div>
+      <Section
+        title={t('Profile')}
+        description={t('Your email address is your identity on OpenReplay and is used to login.')}
+        children={<Settings />}
+      />

       <div className="border-b my-10" />

       {account.hasPassword && (
         <>
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Change Password')}</h4>
-              <div className={styles.info}>
-                {t('Updating your password from time to time enhances your account’s security.')}
-              </div>
-            </div>
-            <div>
-              <ChangePassword />
-            </div>
-          </div>
+          <Section
+            title={t('Change Password')}
+            description={t('Updating your password from time to time enhaces your account’s security')}
+            children={<ChangePassword />}
+          />

           <div className="border-b my-10" />
         </>
       )}

-      <div className="flex items-center">
-        <div className={styles.left}>
-          <h4 className="text-lg mb-4">{t('Organization API Key')}</h4>
-          <div className={styles.info}>
-            {t('Your API key gives you access to an extra set of services.')}
-          </div>
-        </div>
-        <div>
-          <Api />
-        </div>
-      </div>
+      <Section
+        title={t('Interface Language')}
+        description={t('Select the language in which OpenReplay will appear.')}
+        children={<LanguageSwitcher />}
+      />
+      <Section
+        title={t('Organization API Key')}
+        description={t('Your API key gives you access to an extra set of services.')}
+        children={<Api />}
+      />

       {isEnterprise && (account.admin || account.superAdmin) && (
         <>
           <div className="border-b my-10" />
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Tenant Key')}</h4>
-              <div className={styles.info}>
-                {t('For SSO (SAML) authentication.')}
-              </div>
-            </div>
-            <div>
-              <TenantKey />
-            </div>
-          </div>
+          <Section
+            title={t('Tenant Key')}
+            description={t('For SSO (SAML) authentication.')}
+            children={<TenantKey />}
+          />
         </>
       )}

       {!isEnterprise && (
         <>
           <div className="border-b my-10" />
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('Data Collection')}</h4>
-              <div className={styles.info}>
-                {t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
-              </div>
-            </div>
-            <div>
-              <OptOut />
-            </div>
-          </div>
+          <Section
+            title={t('Data Collection')}
+            description={t('Enables you to control how OpenReplay captures data on your organization’s usage to improve our product.')}
+            children={<OptOut />}
+          />
         </>
       )}

       {account.license && (
         <>
           <div className="border-b my-10" />
-          <div className="flex items-center">
-            <div className={styles.left}>
-              <h4 className="text-lg mb-4">{t('License')}</h4>
-              <div className={styles.info}>
-                {t('License key and expiration date.')}
-              </div>
-            </div>
-            <div>
-              <Licenses />
-            </div>
-          </div>
+          <Section title={t('License')} description={t('License key and expiration date.')} children={<Licenses />} />
         </>
       )}
     </div>
   );
 }

+function Section({ title, description, children }: {
+  title: string;
+  description: string;
+  children: React.ReactNode;
+}) {
+  return (
+    <div className="flex items-center">
+      <div className={styles.left}>
+        <h4 className="text-lg mb-4">{title}</h4>
+        <div className={styles.info}>
+          {description}
+        </div>
+      </div>
+      <div>
+        {children}
+      </div>
+    </div>
+  )
+}
+
 export default withPageTitle('Account - OpenReplay Preferences')(
   observer(ProfileSettings),
 );

@@ -6,7 +6,6 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying';
 import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone';
 import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility';
 import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings';
-import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode';
 import DebugLog from './DebugLog';
 import { useTranslation } from 'react-i18next';

@@ -36,7 +35,6 @@ function SessionsListingSettings() {
       <div className="flex flex-col gap-2">
         <MouseTrailSettings />
         <DebugLog />
-        <VirtualModeSettings />
       </div>
     </div>
   </div>

@@ -6,7 +6,6 @@ import CardSessionsByList from 'Components/Dashboard/Widgets/CardSessionsByList'
 import { useModal } from 'Components/ModalContext';
 import Widget from '@/mstore/types/widget';
 import { useTranslation } from 'react-i18next';
-import { FilterKey } from 'Types/filter/filterType';

 interface Props {
   metric?: any;

@@ -36,20 +35,20 @@ function SessionsBy(props: Props) {
       ...filtersMap[metric.metricOf],
       value: [data.name],
       type: filtersMap[metric.metricOf].key,
-      filters: [],
+      filters: filtersMap[metric.metricOf].filters?.map((f: any) => {
+        const {
+          key,
+          operatorOptions,
+          category,
+          icon,
+          label,
+          options,
+          ...cleaned
+        } = f;
+        return { ...cleaned, type: f.key, value: [] };
+      }),
     };

-    if (metric.metricOf === FilterKey.FETCH) {
-      baseFilter.filters = [
-        {
-          key: FilterKey.FETCH_URL,
-          operator: 'is',
-          value: [data.name],
-          type: FilterKey.FETCH_URL,
-        }
-      ];
-    }
-
     const {
       key,
       operatorOptions,

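The `filters` mapping in the new version strips UI-only fields with a rest pattern before the filter is handed to the API. A minimal self-contained sketch of that idiom, using the field names from the hunk above (the RawFilter type is assumed here for illustration, it is not part of the branch):

    interface RawFilter {
      key: string;
      operatorOptions?: unknown;
      category?: string;
      icon?: string;
      label?: string;
      options?: unknown[];
      [extra: string]: unknown;
    }

    // Destructure the presentation-only fields away; `cleaned` keeps everything else.
    function toApiFilter(f: RawFilter) {
      const { key, operatorOptions, category, icon, label, options, ...cleaned } = f;
      // The API payload uses `type` in place of `key` and starts with an empty value list.
      return { ...cleaned, type: key, value: [] as string[] };
    }
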
@@ -23,7 +23,6 @@ function BottomButtons({
       <Button
         loading={loading}
         type="primary"
-        htmlType="submit"
         disabled={loading || !instance.validate()}
         id="submit-button"
       >

@@ -43,7 +43,7 @@ function ClickMapRagePicker() {
       <Checkbox onChange={onToggle} label={t('Include rage clicks')} />

       <Button size="small" onClick={refreshHeatmapSession}>
-        {t('Get new image')}
+        {t('Get new session')}
       </Button>
     </div>
   );

@@ -64,7 +64,6 @@ function DashboardView(props: Props) {
   };

   useEffect(() => {
-    dashboardStore.resetPeriod();
     if (queryParams.has('modal')) {
       onAddWidgets();
       trimQuery();

@@ -117,6 +117,8 @@ const ListView: React.FC<Props> = ({
     if (disableSelection) {
       const path = withSiteId(`/metrics/${metric.metricId}`, siteId);
       history.push(path);
+    } else {
+      toggleSelection?.(metric.metricId);
     }
   };

@@ -68,7 +68,7 @@ function MetricsList({
   }, [metricStore]);

-  const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== '';
+  const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== 'all';

   const searchImageDimensions = { width: 60, height: 'auto' };
   const defaultImageDimensions = { width: 600, height: 'auto' };

@@ -181,10 +181,9 @@ function WidgetChart(props: Props) {
     }
     prevMetricRef.current = _metric;
     const timestmaps = drillDownPeriod.toTimestamps();
-    const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
     const payload = isSaved
-      ? { ...metricParams, density }
-      : { ...params, ...timestmaps, ..._metric.toJson(), density };
+      ? { ...metricParams }
+      : { ...params, ...timestmaps, ..._metric.toJson() };
     debounceRequest(
       _metric,
       payload,

@@ -55,7 +55,7 @@ function RangeGranularity({
 }

 const PAST_24_HR_MS = 24 * 60 * 60 * 1000;
-export function calculateGranularities(periodDurationMs: number) {
+function calculateGranularities(periodDurationMs: number) {
   const granularities = [
     { label: 'Hourly', durationMs: 60 * 60 * 1000 },
     { label: 'Daily', durationMs: 24 * 60 * 60 * 1000 },

@@ -1,395 +1,376 @@
-import React, {useEffect, useState} from 'react';
-import {NoContent, Loader, Pagination} from 'UI';
-import {Button, Tag, Tooltip, Dropdown, message} from 'antd';
-import {UndoOutlined, DownOutlined} from '@ant-design/icons';
+import React, { useEffect, useState } from 'react';
+import { NoContent, Loader, Pagination } from 'UI';
+import { Button, Tag, Tooltip, Dropdown, message } from 'antd';
+import { UndoOutlined, DownOutlined } from '@ant-design/icons';
 import cn from 'classnames';
-import {useStore} from 'App/mstore';
+import { useStore } from 'App/mstore';
 import SessionItem from 'Shared/SessionItem';
-import {observer} from 'mobx-react-lite';
-import {DateTime} from 'luxon';
-import {debounce, numberWithCommas} from 'App/utils';
+import { observer } from 'mobx-react-lite';
+import { DateTime } from 'luxon';
+import { debounce, numberWithCommas } from 'App/utils';
 import useIsMounted from 'App/hooks/useIsMounted';
-import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG';
-import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card';
-import {useTranslation} from 'react-i18next';
+import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
+import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card';
+import { useTranslation } from 'react-i18next';

 interface Props {
   className?: string;
 }

 function WidgetSessions(props: Props) {
-  const {t} = useTranslation();
+  const { t } = useTranslation();
   const listRef = React.useRef<HTMLDivElement>(null);
-  const {className = ''} = props;
+  const { className = '' } = props;
   const [activeSeries, setActiveSeries] = useState('all');
   const [data, setData] = useState<any>([]);
   const isMounted = useIsMounted();
   const [loading, setLoading] = useState(false);
   // all filtering done through series now
   const filteredSessions = getListSessionsBySeries(data, 'all');
-  const {dashboardStore, metricStore, sessionStore, customFieldStore} =
+  const { dashboardStore, metricStore, sessionStore, customFieldStore } =
     useStore();
   const focusedSeries = metricStore.focusedSeriesName;
   const filter = dashboardStore.drillDownFilter;
   const widget = metricStore.instance;
   const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
     'LLL dd, yyyy HH:mm',
   );
   const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
     'LLL dd, yyyy HH:mm',
   );
   const [seriesOptions, setSeriesOptions] = useState([
-    {label: t('All'), value: 'all'},
+    { label: t('All'), value: 'all' },
   ]);
   const hasFilters =
     filter.filters.length > 0 ||
     filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
     filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
   const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
   const metaList = customFieldStore.list.map((i: any) => i.key);

   const seriesDropdownItems = seriesOptions.map((option) => ({
     key: option.value,
     label: (
       <div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
     ),
-  }));
-
-  useEffect(() => {
-    if (!widget.series) return;
-    const seriesOptions = widget.series.map((item: any) => ({
-      label: item.name,
-      value: item.seriesId ?? item.name,
-    }));
-    setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]);
-  }, [widget.series.length]);
-
-  const fetchSessions = (metricId: any, filter: any) => {
-    if (!isMounted()) return;
-
-    if (widget.metricType === FUNNEL) {
-      if (filter.series[0].filter.filters.length === 0) {
-        setLoading(false);
-        return setData([]);
-      }
-    }
-
-    setLoading(true);
-    const filterCopy = {...filter};
-    delete filterCopy.eventsOrderSupport;
-
-    try {
-      // Handle filters properly with null checks
-      if (filterCopy.filters && filterCopy.filters.length > 0) {
-        // Ensure the nested path exists before pushing
-        if (filterCopy.series?.[0]?.filter) {
-          if (!filterCopy.series[0].filter.filters) {
-            filterCopy.series[0].filter.filters = [];
-          }
-          filterCopy.series[0].filter.filters.push(...filterCopy.filters);
-        }
-        filterCopy.filters = [];
-      }
-    } catch (e) {
-      // do nothing
-    }
-
-    widget
-      .fetchSessions(metricId, filterCopy)
-      .then((res: any) => {
-        setData(res);
-        if (metricStore.drillDown) {
-          setTimeout(() => {
-            message.info(t('Sessions Refreshed!'));
-            listRef.current?.scrollIntoView({behavior: 'smooth'});
-            metricStore.setDrillDown(false);
-          }, 0);
-        }
-      })
-      .finally(() => {
-        setLoading(false);
-      });
-  };
-  const fetchClickmapSessions = (customFilters: Record<string, any>) => {
-    sessionStore.getSessions(customFilters).then((data) => {
-      setData([{...data, seriesId: 1, seriesName: 'Clicks'}]);
-    });
-  };
-  const debounceRequest: any = React.useCallback(
-    debounce(fetchSessions, 1000),
-    [],
-  );
-  const debounceClickMapSearch = React.useCallback(
-    debounce(fetchClickmapSessions, 1000),
-    [],
-  );
-
-  const depsString = JSON.stringify(widget.series);
-
-  const loadData = () => {
-    if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
-      const clickFilter = {
-        value: [metricStore.clickMapSearch],
-        type: 'CLICK',
-        operator: 'onSelector',
-        isEvent: true,
-        // @ts-ignore
-        filters: [],
-      };
-      const timeRange = {
-        rangeValue: dashboardStore.drillDownPeriod.rangeValue,
-        startDate: dashboardStore.drillDownPeriod.start,
-        endDate: dashboardStore.drillDownPeriod.end,
-      };
-      const customFilter = {
-        ...filter,
-        ...timeRange,
-        filters: [...sessionStore.userFilter.filters, clickFilter],
-      };
-      debounceClickMapSearch(customFilter);
-    } else {
-      const hasStartPoint =
-        !!widget.startPoint && widget.metricType === USER_PATH;
-      const onlyFocused = focusedSeries
-        ? widget.series.filter((s) => s.name === focusedSeries)
-        : widget.series;
-      const activeSeries = metricStore.disabledSeries.length
-        ? onlyFocused.filter(
-            (s) => !metricStore.disabledSeries.includes(s.name),
-          )
-        : onlyFocused;
-      const seriesJson = activeSeries.map((s) => s.toJson());
-      if (hasStartPoint) {
-        seriesJson[0].filter.filters.push(widget.startPoint.toJson());
-      }
-      if (widget.metricType === USER_PATH) {
-        if (
-          seriesJson[0].filter.filters[0].value[0] === '' &&
-          widget.data.nodes?.length
-        ) {
-          seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
-        } else if (
-          seriesJson[0].filter.filters[0].value[0] === '' &&
-          !widget.data.nodes?.length
-        ) {
-          // no point requesting if we don't have starting point picked by api
-          return;
-        }
-      }
-      debounceRequest(widget.metricId, {
-        ...filter,
-        series: seriesJson,
-        page: metricStore.sessionsPage,
-        limit: metricStore.sessionsPageSize,
-      });
-    }
-  };
-  useEffect(() => {
-    metricStore.updateKey('sessionsPage', 1);
-    loadData();
-  }, [
-    filter.startTimestamp,
-    filter.endTimestamp,
-    filter.filters,
-    depsString,
-    metricStore.clickMapSearch,
-    focusedSeries,
-    widget.startPoint,
-    widget.data.nodes,
-    metricStore.disabledSeries.length,
-  ]);
-  useEffect(loadData, [metricStore.sessionsPage]);
-  useEffect(() => {
-    if (activeSeries === 'all') {
-      metricStore.setFocusedSeriesName(null);
-    } else {
-      metricStore.setFocusedSeriesName(
-        seriesOptions.find((option) => option.value === activeSeries)?.label,
-        false,
-      );
-    }
-  }, [activeSeries]);
-  useEffect(() => {
-    if (focusedSeries) {
-      setActiveSeries(
-        seriesOptions.find((option) => option.label === focusedSeries)?.value ||
-          'all',
-      );
-    } else {
-      setActiveSeries('all');
-    }
-  }, [focusedSeries]);
-
-  const clearFilters = () => {
-    metricStore.updateKey('sessionsPage', 1);
-    dashboardStore.resetDrillDownFilter();
-  };
-
-  return (
-    <div
-      className={cn(
-        className,
-        'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
-      )}
-    >
-      <div className="flex items-center justify-between">
-        <div>
-          <div className="flex items-baseline gap-2">
-            <h2 className="text-xl">
-              {metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
-            </h2>
-            <div className="ml-2 color-gray-medium">
-              {metricStore.clickMapLabel
-                ? `on "${metricStore.clickMapLabel}" `
-                : null}
-              {t('between')}{' '}
-              <span className="font-medium color-gray-darkest">
+  }));
+
+  useEffect(() => {
+    if (!widget.series) return;
+    const seriesOptions = widget.series.map((item: any) => ({
+      label: item.name,
+      value: item.seriesId ?? item.name,
+    }));
+    setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]);
+  }, [widget.series.length]);
+
+  const fetchSessions = (metricId: any, filter: any) => {
+    if (!isMounted()) return;
+    setLoading(true);
+    delete filter.eventsOrderSupport;
+    if (widget.metricType === FUNNEL) {
+      if (filter.series[0].filter.filters.length === 0) {
+        setLoading(false);
+        return setData([]);
+      }
+    }
+
+    widget
+      .fetchSessions(metricId, filter)
+      .then((res: any) => {
+        setData(res);
+        if (metricStore.drillDown) {
+          setTimeout(() => {
+            message.info(t('Sessions Refreshed!'));
+            listRef.current?.scrollIntoView({ behavior: 'smooth' });
+            metricStore.setDrillDown(false);
+          }, 0);
+        }
+      })
+      .finally(() => {
+        setLoading(false);
+      });
+  };
+  const fetchClickmapSessions = (customFilters: Record<string, any>) => {
+    sessionStore.getSessions(customFilters).then((data) => {
+      setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]);
+    });
+  };
+  const debounceRequest: any = React.useCallback(
+    debounce(fetchSessions, 1000),
+    [],
+  );
+  const debounceClickMapSearch = React.useCallback(
+    debounce(fetchClickmapSessions, 1000),
+    [],
+  );
+
+  const depsString = JSON.stringify(widget.series);
+
+  const loadData = () => {
+    if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
+      const clickFilter = {
+        value: [metricStore.clickMapSearch],
+        type: 'CLICK',
+        operator: 'onSelector',
+        isEvent: true,
+        // @ts-ignore
+        filters: [],
+      };
+      const timeRange = {
+        rangeValue: dashboardStore.drillDownPeriod.rangeValue,
+        startDate: dashboardStore.drillDownPeriod.start,
+        endDate: dashboardStore.drillDownPeriod.end,
+      };
+      const customFilter = {
+        ...filter,
+        ...timeRange,
+        filters: [...sessionStore.userFilter.filters, clickFilter],
+      };
+      debounceClickMapSearch(customFilter);
+    } else {
+      const hasStartPoint =
+        !!widget.startPoint && widget.metricType === USER_PATH;
+      const onlyFocused = focusedSeries
+        ? widget.series.filter((s) => s.name === focusedSeries)
+        : widget.series;
+      const activeSeries = metricStore.disabledSeries.length
+        ? onlyFocused.filter(
+            (s) => !metricStore.disabledSeries.includes(s.name),
+          )
+        : onlyFocused;
+      const seriesJson = activeSeries.map((s) => s.toJson());
+      if (hasStartPoint) {
+        seriesJson[0].filter.filters.push(widget.startPoint.toJson());
+      }
+      if (widget.metricType === USER_PATH) {
+        if (
+          seriesJson[0].filter.filters[0].value[0] === '' &&
+          widget.data.nodes
+        ) {
+          seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
+        } else if (
+          seriesJson[0].filter.filters[0].value[0] === '' &&
+          !widget.data.nodes?.length
+        ) {
+          // no point requesting if we don't have starting point picked by api
+          return;
+        }
+      }
+      debounceRequest(widget.metricId, {
+        ...filter,
+        series: seriesJson,
+        page: metricStore.sessionsPage,
+        limit: metricStore.sessionsPageSize,
+      });
+    }
+  };
+  useEffect(() => {
+    metricStore.updateKey('sessionsPage', 1);
+    loadData();
+  }, [
+    filter.startTimestamp,
+    filter.endTimestamp,
+    filter.filters,
+    depsString,
+    metricStore.clickMapSearch,
+    focusedSeries,
+    widget.startPoint,
+    widget.data.nodes,
+    metricStore.disabledSeries.length,
+  ]);
+  useEffect(loadData, [metricStore.sessionsPage]);
+  useEffect(() => {
+    if (activeSeries === 'all') {
+      metricStore.setFocusedSeriesName(null);
+    } else {
+      metricStore.setFocusedSeriesName(
+        seriesOptions.find((option) => option.value === activeSeries)?.label,
+        false,
+      );
+    }
+  }, [activeSeries]);
+  useEffect(() => {
+    if (focusedSeries) {
+      setActiveSeries(
+        seriesOptions.find((option) => option.label === focusedSeries)?.value ||
+          'all',
+      );
+    } else {
+      setActiveSeries('all');
+    }
+  }, [focusedSeries]);
+
+  const clearFilters = () => {
+    metricStore.updateKey('sessionsPage', 1);
+    dashboardStore.resetDrillDownFilter();
+  };
+
+  return (
+    <div
+      className={cn(
+        className,
+        'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
+      )}
+    >
+      <div className="flex items-center justify-between">
+        <div>
+          <div className="flex items-baseline gap-2">
+            <h2 className="text-xl">
+              {metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
+            </h2>
+            <div className="ml-2 color-gray-medium">
+              {metricStore.clickMapLabel
+                ? `on "${metricStore.clickMapLabel}" `
+                : null}
+              {t('between')}{' '}
+              <span className="font-medium color-gray-darkest">
                 {startTime}
               </span>{' '}
               {t('and')}{' '}
               <span className="font-medium color-gray-darkest">
                 {endTime}
               </span>{' '}
             </div>
             {hasFilters && (
               <Tooltip title={t('Clear Drilldown')} placement="top">
                 <Button type="text" size="small" onClick={clearFilters}>
-                  <UndoOutlined/>
+                  <UndoOutlined />
                 </Button>
               </Tooltip>
             )}
           </div>

           {hasFilters && widget.metricType === 'table' && (
             <div className="py-2">
               <Tag
                 closable
                 onClose={clearFilters}
                 className="truncate max-w-44 rounded-lg"
               >
                 {filterText}
               </Tag>
             </div>
           )}
         </div>

         <div className="flex items-center gap-4">
           {widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
             <div className="flex items-center ml-6">
               <span className="mr-2 color-gray-medium">
                 {t('Filter by Series')}
               </span>
               <Dropdown
                 menu={{
                   items: seriesDropdownItems,
                   selectable: true,
                   selectedKeys: [activeSeries],
                 }}
                 trigger={['click']}
               >
                 <Button type="text" size="small">
                   {seriesOptions.find((option) => option.value === activeSeries)
                     ?.label || t('Select Series')}
-                  <DownOutlined/>
+                  <DownOutlined />
                 </Button>
               </Dropdown>
             </div>
           )}
         </div>
       </div>

       <div className="mt-3">
         <Loader loading={loading}>
           <NoContent
             title={
               <div className="flex items-center justify-center flex-col">
-                <AnimatedSVG name={ICONS.NO_SESSIONS} size={60}/>
-                <div className="mt-4"/>
+                <AnimatedSVG name={ICONS.NO_SESSIONS} size={60} />
+                <div className="mt-4" />
                 <div className="text-center">
                   {t('No relevant sessions found for the selected time period')}
                 </div>
               </div>
             }
             show={filteredSessions.sessions.length === 0}
           >
             {filteredSessions.sessions.map((session: any) => (
               <React.Fragment key={session.sessionId}>
                 <SessionItem
                   disableUser
                   session={session}
                   metaList={metaList}
                 />
-                <div className="border-b"/>
+                <div className="border-b" />
               </React.Fragment>
             ))}

             <div
               className="flex items-center justify-between p-5"
               ref={listRef}
             >
               <div>
                 {t('Showing')}{' '}
                 <span className="font-medium">
                   {(metricStore.sessionsPage - 1) *
                     metricStore.sessionsPageSize +
                     1}
                 </span>{' '}
                 {t('to')}{' '}
                 <span className="font-medium">
                   {(metricStore.sessionsPage - 1) *
                     metricStore.sessionsPageSize +
                     filteredSessions.sessions.length}
                 </span>{' '}
                 {t('of')}{' '}
                 <span className="font-medium">
                   {numberWithCommas(filteredSessions.total)}
                 </span>{' '}
                 {t('sessions.')}
               </div>
               <Pagination
                 page={metricStore.sessionsPage}
                 total={filteredSessions.total}
                 onPageChange={(page: any) =>
                   metricStore.updateKey('sessionsPage', page)
                 }
                 limit={metricStore.sessionsPageSize}
                 debounceRequest={500}
               />
             </div>
           </NoContent>
         </Loader>
       </div>
     </div>
   );
 }

 const getListSessionsBySeries = (data: any, seriesId: any) => {
   const arr = data.reduce(
     (arr: any, element: any) => {
       if (seriesId === 'all') {
         const sessionIds = arr.sessions.map((i: any) => i.sessionId);
         const sessions = element.sessions.filter(
           (i: any) => !sessionIds.includes(i.sessionId),
         );
         arr.sessions.push(...sessions);
       } else if (element.seriesId === seriesId) {
         const sessionIds = arr.sessions.map((i: any) => i.sessionId);
         const sessions = element.sessions.filter(
           (i: any) => !sessionIds.includes(i.sessionId),
         );
         const duplicates = element.sessions.length - sessions.length;
         arr.sessions.push(...sessions);
         arr.total = element.total - duplicates;
       }
       return arr;
     },
-    {sessions: []},
+    { sessions: [] },
   );
   arr.total =
     seriesId === 'all'
       ? Math.max(...data.map((i: any) => i.total))
       : data.find((i: any) => i.seriesId === seriesId).total;
   return arr;
 };

 export default observer(WidgetSessions);

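To make the dedup logic in getListSessionsBySeries above concrete, a small worked example (the values are invented; the shape mirrors what the reducer reads):

    const data = [
      { seriesId: 1, total: 3, sessions: [{ sessionId: 'a' }, { sessionId: 'b' }] },
      { seriesId: 2, total: 2, sessions: [{ sessionId: 'b' }, { sessionId: 'c' }] },
    ];

    // seriesId === 'all': sessions merge with duplicates dropped, and total is
    // the max of the per-series totals:
    //   getListSessionsBySeries(data, 'all') -> { sessions: [a, b, c], total: 3 }

    // A concrete seriesId keeps only that series, subtracting any duplicates
    // already collected:
    //   getListSessionsBySeries(data, 2) -> { sessions: [b, c], total: 2 }
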
@@ -92,9 +92,6 @@ function WidgetView({
         filter: { filters: selectedCard.filters },
       }),
     ];
-  } else if (selectedCard.cardType === TABLE) {
-    cardData.series = [new FilterSeries()];
-    cardData.series[0].filter.eventsOrder = 'and';
   }
   if (selectedCard.cardType === FUNNEL) {
     cardData.series = [new FilterSeries()];

@@ -83,7 +83,6 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
   });

   const onChartClick = () => {
-    dashboardStore.setDrillDownPeriod(dashboardStore.period);
     // if (!isWidget || isPredefined) return;
     props.history.push(
       withSiteId(

@@ -1,9 +1,7 @@
-import { Button, Dropdown, MenuProps, Space, Typography } from 'antd';
-import React, { useCallback, useState } from 'react';
+import { Button, Dropdown, MenuProps, Typography } from 'antd';
+import React from 'react';
 import { useTranslation } from 'react-i18next';
-import { CaretDownOutlined } from '@ant-design/icons';
-import { Languages } from 'lucide-react';
-import { Icon } from '../ui';
+import { ChevronDown } from 'lucide-react';

 const langs = [
   { code: 'en', label: 'English' },

@@ -12,14 +10,25 @@ const langs = [
   { code: 'ru', label: 'Русский' },
   { code: 'zh', label: '中國人' },
 ];
+const langLabels = {
+  en: 'English',
+  fr: 'Français',
+  es: 'Español',
+  ru: 'Русский',
+  zh: '中國人',
+}

 function LanguageSwitcher() {
   const { i18n } = useTranslation();
+  const [selected, setSelected] = React.useState(i18n.language);

-  const handleChangeLanguage = useCallback((lang: string) => {
-    i18n.changeLanguage(lang);
-    localStorage.setItem('i18nextLng', lang);
-  }, []);
+  const onChange = (val: string) => {
+    setSelected(val)
+  }
+  const handleChangeLanguage = () => {
+    void i18n.changeLanguage(selected)
+    localStorage.setItem('i18nextLng', selected)
+  }

   const menuItems: MenuProps['items'] = langs.map((lang) => ({
     key: lang.code,

@@ -31,21 +40,31 @@ function LanguageSwitcher() {
   }));

   return (
-    <Dropdown
-      menu={{
-        items: menuItems,
-        selectable: true,
-        defaultSelectedKeys: [i18n.language],
-        style: {
-          maxHeight: 500,
-          overflowY: 'auto',
-        },
-        onClick: (e) => handleChangeLanguage(e.key),
-      }}
-      placement="bottomLeft"
-    >
-      <Button icon={<Languages size={12} />} />
-    </Dropdown>
+    <div className={'flex flex-col gap-2 align-start'}>
+      <div className={'font-semibold'}>{i18n.t('Language')}</div>
+      <Dropdown
+        menu={{
+          items: menuItems,
+          selectable: true,
+          defaultSelectedKeys: [i18n.language],
+          style: {
+            maxHeight: 500,
+            overflowY: 'auto',
+          },
+          onClick: (e) => onChange(e.key),
+        }}
+      >
+        <Button>
+          <div className={'flex justify-between items-center gap-8'}>
+            <span>{langLabels[selected]}</span>
+            <ChevronDown size={14} />
+          </div>
+        </Button>
+      </Dropdown>
+      <Button className={'w-fit'} onClick={handleChangeLanguage}>
+        {i18n.t('Update')}
+      </Button>
+    </div>
   );
 }

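One note on the new `langLabels[selected]` lookup: with `langLabels` declared as a plain object literal, strict TypeScript flags arbitrary string indexing. A typed variant of the same table (illustrative only, not part of the branch):

    const langLabels: Record<string, string> = {
      en: 'English',
      fr: 'Français',
      es: 'Español',
      ru: 'Русский',
      zh: '中國人',
    };

    // Fall back to the raw language code for anything not in the table.
    const labelFor = (code: string) => langLabels[code] ?? code;
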
@@ -14,7 +14,7 @@ interface SSOLoginProps {
 const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
   const { userStore } = useStore();
   const { t } = useTranslation();
-  const { isSSOSupported } = userStore;
+  const { isEnterprise } = userStore;

   const getSSOLink = () =>
     window !== window.top

@@ -23,7 +23,7 @@ const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {

   const ssoLink = getSSOLink();
   const ssoButtonText = `${t('Login with SSO')} ${authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
   }`;

   if (enforceSSO) {
     return (

@@ -47,7 +47,7 @@ const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
       <Tooltip
         title={
           <div className="text-center">
-            {isSSOSupported ? (
+            {isEnterprise ? (
               <span>
                 {t('SSO has not been configured.')}
                 <br />

@@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
 import { useTranslation } from 'react-i18next';

 const installationCommand = 'npm i @openreplay/tracker';
-const usageCode = `import Tracker from '@openreplay/tracker';
+const usageCode = `import { tracker } from '@openreplay/tracker';

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });
 tracker.start()`;
-const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
+const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
+// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope

-const tracker = new Tracker({
+tracker.configure({
   projectKey: "PROJECT_KEY",
   ingestPoint: "https://${window.location.hostname}/ingest",
 });

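The comment added to usageCodeSST points at a dynamic import as the alternative to the /cjs build. A minimal sketch of that setup, assuming the same singleton `tracker` API shown in the snippet above (the option values are placeholders):

    // Load the tracker only in the browser so the server pass never touches window.
    async function startTracker() {
      if (typeof window === 'undefined') return;
      const { tracker } = await import('@openreplay/tracker');
      tracker.configure({
        projectKey: 'PROJECT_KEY',
        ingestPoint: `https://${window.location.hostname}/ingest`,
      });
      tracker.start();
    }
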
@@ -8,7 +8,7 @@ import {
   LikeFilled,
   LikeOutlined,
 } from '@ant-design/icons';
-import { Tour, TourProps } from 'antd';
+import { Tour, TourProps } from './.store/antd-virtual-7db13b4af6/package';
 import { useTranslation } from 'react-i18next';

 interface Props {

@@ -91,7 +91,7 @@ function PlayerBlockHeader(props: Props) {
         )}
       </div>
     </div>
-    <div className="relative border-l" style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}>
+    <div className="relative border-l" style={{ minWidth: '270px' }}>
       <Tabs
         tabs={TABS}
         active={activeTab}

@@ -61,7 +61,7 @@ function PlayerContent({
       className="w-full"
       style={
         activeTab && !fullscreen
-          ? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
+          ? { maxWidth: 'calc(100% - 270px)' }
           : undefined
       }
     >

@@ -42,7 +42,7 @@ function DropdownAudioPlayer({
       return {
         url: data.url,
         timestamp: data.timestamp,
-        start: Math.max(0, startTs),
+        start: startTs,
       };
     }),
     [audioEvents.length, sessionStart],

@@ -114,17 +114,19 @@ function PlayerBlockHeader(props: any) {
         )}

         {_metaList.length > 0 && (
-          <SessionMetaList
-            horizontal
-            metaList={_metaList}
-            maxLength={2}
-          />
+          <div className="h-full flex items-center px-2 gap-1">
+            <SessionMetaList
+              className=""
+              metaList={_metaList}
+              maxLength={2}
+            />
+          </div>
         )}
       </div>
     </div>
     <div
       className="px-2 relative border-l border-l-gray-lighter"
-      style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}
+      style={{ minWidth: '270px' }}
     >
       <Tabs
         tabs={TABS}

@@ -65,7 +65,7 @@ function PlayerContent({
       className="w-full"
       style={
         activeTab && !fullscreen
-          ? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
+          ? { maxWidth: 'calc(100% - 270px)' }
           : undefined
       }
     >

@@ -182,7 +182,6 @@ function Player(props: IProps) {
           setActiveTab={(tab: string) =>
             activeTab === tab ? props.setActiveTab('') : props.setActiveTab(tab)
           }
-          activeTab={activeTab}
           speedDown={playerContext.player.speedDown}
           speedUp={playerContext.player.speedUp}
           jump={playerContext.player.jump}

@@ -7,16 +7,13 @@ import { Icon } from 'UI';
 function LogsButton({
   integrated,
   onClick,
-  shorten,
 }: {
   integrated: string[];
   onClick: () => void;
-  shorten?: boolean;
 }) {
   return (
     <ControlButton
-      label={shorten ? null : "Traces"}
-      customKey="traces"
+      label="Traces"
       customTags={
         <Avatar.Group>
           {integrated.map((name) => (

@@ -38,8 +38,8 @@ function WebPlayer(props: any) {
     uxtestingStore,
     uiPlayerStore,
     integrationsStore,
+    userStore,
   } = useStore();
-  const devTools = sessionStore.devTools
   const session = sessionStore.current;
   const { prefetched } = sessionStore;
   const startedAt = sessionStore.current.startedAt || 0;

@@ -57,17 +57,14 @@ function WebPlayer(props: any) {
   const [fullView, setFullView] = useState(false);

   React.useEffect(() => {
-    const handleActivation = () => {
-      if (!document.hidden) {
-        setWindowActive(true);
-        document.removeEventListener('visibilitychange', handleActivation);
-      }
-    };
-    document.addEventListener('visibilitychange', handleActivation);
-
-    return () => {
-      devTools.update('network', { activeTab: 'ALL' });
-      document.removeEventListener('visibilitychange', handleActivation);
+    if (windowActive) {
+      const handleActivation = () => {
+        if (!document.hidden) {
+          setWindowActive(true);
+          document.removeEventListener('visibilitychange', handleActivation);
+        }
+      };
+      document.addEventListener('visibilitychange', handleActivation);
     }
   }, []);

@@ -169,6 +169,6 @@ function TabChange({ from, to, activeUrl, onClick }) {
       </div>
     </div>
   );
-};
+}

 export default observer(EventGroupWrapper);

@@ -4,17 +4,17 @@ import cn from 'classnames';
 import { observer } from 'mobx-react-lite';
 import React from 'react';
 import { VList, VListHandle } from 'virtua';
-import { Button } from 'antd';
+import { Button } from 'antd'
 import { PlayerContext } from 'App/components/Session/playerContext';
 import { useStore } from 'App/mstore';
 import { Icon } from 'UI';
-import { Search } from 'lucide-react';
+import { Search } from 'lucide-react'
 import EventGroupWrapper from './EventGroupWrapper';
 import EventSearch from './EventSearch/EventSearch';
 import styles from './eventsBlock.module.css';
 import { useTranslation } from 'react-i18next';
-import { CloseOutlined } from "@ant-design/icons";
-import { Tooltip } from "antd";
+import { CloseOutlined } from ".store/@ant-design-icons-virtual-42686020c5/package";
+import { Tooltip } from ".store/antd-virtual-9dbfadb7f6/package";
 import { getDefaultFramework, frameworkIcons } from "../UnitStepsModal";

 interface IProps {

@@ -25,7 +25,7 @@ const MODES = {
   SELECT: 'select',
   SEARCH: 'search',
   EXPORT: 'export',
-};
+}

 function EventsBlock(props: IProps) {
   const defaultFramework = getDefaultFramework();

@@ -95,7 +95,7 @@ function EventsBlock(props: IProps) {
           ? e.time >= zoomStartTs && e.time <= zoomEndTs
           : false
         : true,
     );
   }, [
     filteredLength,
     notesWithEvtsLength,

@@ -126,7 +126,6 @@ function EventsBlock(props: IProps) {
     },
     [usedEvents, time, endTime],
   );
-
   const currentTimeEventIndex = findLastFitting(time);

   const write = ({

@@ -183,7 +182,6 @@ function EventsBlock(props: IProps) {
     const isTabChange = 'type' in event && event.type === 'TABCHANGE';
     const isCurrent = index === currentTimeEventIndex;
-    const isPrev = index < currentTimeEventIndex;

     return (
       <EventGroupWrapper
         query={query}

@@ -251,14 +249,12 @@ function EventsBlock(props: IProps) {
           onClick={() => setMode(MODES.SEARCH)}
         >
           <Search size={14} />
-          <div>
-            {t('Search')} {usedEvents.length} {t('events')}
-          </div>
+          <div>{t('Search')} {usedEvents.length} {t('events')}</div>
         </Button>
-        <Tooltip title={t('Close Panel')} placement="bottom">
+        <Tooltip title={t('Close Panel')} placement='bottom' >
           <Button
             className="ml-auto"
-            type="text"
+            type='text'
             onClick={() => {
               setActiveTab('');
             }}

@@ -267,23 +263,19 @@ function EventsBlock(props: IProps) {
           </Tooltip>
         </div>
       ) : null}
-      {mode === MODES.SEARCH ? (
+      {mode === MODES.SEARCH ?
         <div className={'flex items-center gap-2'}>
           <EventSearch
             onChange={write}
             setActiveTab={setActiveTab}
             value={query}
             eventsText={
-              usedEvents.length
-                ? `${usedEvents.length} ${t('Events')}`
-                : `0 ${t('Events')}`
+              usedEvents.length ? `${usedEvents.length} ${t('Events')}` : `0 ${t('Events')}`
             }
           />
-          <Button type={'text'} onClick={() => setMode(MODES.SELECT)}>
-            {t('Cancel')}
-          </Button>
+          <Button type={'text'} onClick={() => setMode(MODES.SELECT)}>{t('Cancel')}</Button>
         </div>
-      ) : null}
+      : null}
     </div>
     <div
       className={cn('flex-1 pb-4', styles.eventsList)}

@@ -4,7 +4,7 @@ import { Popover, Button } from 'antd';
 import stl from './controlButton.module.css';

 interface IProps {
-  label: React.ReactNode;
+  label: string;
   icon?: string;
   disabled?: boolean;
   onClick?: () => void;

|
||||||
noIcon?: boolean;
|
noIcon?: boolean;
|
||||||
popover?: React.ReactNode;
|
popover?: React.ReactNode;
|
||||||
customTags?: React.ReactNode;
|
customTags?: React.ReactNode;
|
||||||
customKey?: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function ControlButton({
|
function ControlButton({
|
||||||
|
|
@@ -29,28 +28,29 @@ function ControlButton({
   active = false,
   popover = undefined,
   customTags,
-  customKey,
 }: IProps) {
   return (
     <Popover content={popover} open={popover ? undefined : false}>
       <Button
         size="small"
         onClick={onClick}
-        id={`control-button-${customKey ? customKey.toLowerCase() : label!.toString().toLowerCase()}`}
+        id={`control-button-${label.toLowerCase()}`}
         disabled={disabled}
       >
         {customTags}
         {hasErrors && (
-          <div className="w-2 h-2 rounded-full bg-red" />
+          <div className={stl.labels}>
+            <div className={stl.errorSymbol} />
+          </div>
         )}
-        {label && <span
+        <span
           className={cn(
             'font-semibold hover:text-main',
             active ? 'color-main' : 'color-gray-darkest',
           )}
         >
           {label}
-        </span>}
+        </span>
       </Button>
     </Popover>
   );

@@ -32,8 +32,6 @@ import {
 } from 'App/mstore/uiPlayerStore';
 import { Icon } from 'UI';
 import LogsButton from 'App/components/Session/Player/SharedComponents/BackendLogs/LogsButton';
-import { CodeOutlined, DashboardOutlined, ClusterOutlined } from '@ant-design/icons';
-import { ArrowDownUp, ListCollapse, Merge, Waypoints } from 'lucide-react'

 import ControlButton from './ControlButton';
 import Timeline from './Timeline';

@ -54,23 +52,23 @@ export const SKIP_INTERVALS = {
|
||||||
function getStorageName(type: any) {
|
function getStorageName(type: any) {
|
||||||
switch (type) {
|
switch (type) {
|
||||||
case STORAGE_TYPES.REDUX:
|
case STORAGE_TYPES.REDUX:
|
||||||
return { name: 'Redux', icon: <Icon name='integrations/redux' size={14} /> };
|
return 'Redux';
|
||||||
case STORAGE_TYPES.MOBX:
|
case STORAGE_TYPES.MOBX:
|
||||||
return { name: 'Mobx', icon: <Icon name='integrations/mobx' size={14} /> };
|
return 'Mobx';
|
||||||
case STORAGE_TYPES.VUEX:
|
case STORAGE_TYPES.VUEX:
|
||||||
return { name: 'Vuex', icon: <Icon name='integrations/vuejs' size={14} /> };
|
return 'Vuex';
|
||||||
case STORAGE_TYPES.NGRX:
|
case STORAGE_TYPES.NGRX:
|
||||||
return { name: 'NgRx', icon: <Icon name='integrations/ngrx' size={14} /> };
|
return 'NgRx';
|
||||||
case STORAGE_TYPES.ZUSTAND:
|
case STORAGE_TYPES.ZUSTAND:
|
||||||
return { name: 'Zustand', icon: <Icon name='integrations/zustand' size={14} /> };
|
return 'Zustand';
|
||||||
case STORAGE_TYPES.NONE:
|
case STORAGE_TYPES.NONE:
|
||||||
return { name: 'State', icon: <ClusterOutlined size={14} /> };
|
return 'State';
|
||||||
default:
|
default:
|
||||||
return { name: 'State', icon: <ClusterOutlined size={14} /> };
|
return 'State';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function Controls({ setActiveTab, activeTab }: any) {
|
function Controls({ setActiveTab }: any) {
|
||||||
const { player, store } = React.useContext(PlayerContext);
|
const { player, store } = React.useContext(PlayerContext);
|
||||||
const {
|
const {
|
||||||
uxtestingStore,
|
uxtestingStore,
|
||||||
|
|
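With this hunk, getStorageName returns a plain display string again instead of a { name, icon } pair; one side effect is that the `as string` cast on the state button's label further down in this diff becomes redundant. A self-contained sketch of the reverted shape (the STORAGE_TYPES values here are stand-ins; the real constants come from the player store):

// Assumed stand-in for the app's STORAGE_TYPES constants.
const STORAGE_TYPES = {
  REDUX: 'REDUX',
  MOBX: 'MOBX',
  VUEX: 'VUEX',
  NGRX: 'NGRX',
  ZUSTAND: 'ZUSTAND',
  NONE: 'NONE',
} as const;

type StorageType = (typeof STORAGE_TYPES)[keyof typeof STORAGE_TYPES];

// Reverted shape: one display string per store flavor, 'State' as fallback.
function getStorageName(type: StorageType): string {
  switch (type) {
    case STORAGE_TYPES.REDUX: return 'Redux';
    case STORAGE_TYPES.MOBX: return 'Mobx';
    case STORAGE_TYPES.VUEX: return 'Vuex';
    case STORAGE_TYPES.NGRX: return 'NgRx';
    case STORAGE_TYPES.ZUSTAND: return 'Zustand';
    default: return 'State';
  }
}

console.log(getStorageName(STORAGE_TYPES.REDUX)); // "Redux"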
@@ -193,7 +191,6 @@ function Controls({ setActiveTab, activeTab }: any) {
           bottomBlock={bottomBlock}
           disabled={disabled}
           events={events}
-          activeTab={activeTab}
         />
       )}

@@ -215,7 +212,6 @@ interface IDevtoolsButtons {
   bottomBlock: number;
   disabled: boolean;
   events: any[];
-  activeTab?: string;
 }

 const DevtoolsButtons = observer(
@@ -225,7 +221,6 @@ const DevtoolsButtons = observer(
     bottomBlock,
     disabled,
     events,
-    activeTab,
   }: IDevtoolsButtons) => {
     const { t } = useTranslation();
     const { aiSummaryStore, integrationsStore } = useStore();
@@ -267,36 +262,6 @@ const DevtoolsButtons = observer(
     const possibleAudio = events.filter((e) => e.name.includes('media/audio'));
     const integratedServices =
       integrationsStore.integrations.backendLogIntegrations;
-
-    const showIcons = activeTab === 'EXPORT'
-    const labels = {
-      console: {
-        icon: <CodeOutlined size={14} />,
-        label: t('Console'),
-      },
-      performance: {
-        icon: <DashboardOutlined size={14} />,
-        label: t('Performance'),
-      },
-      network: {
-        icon: <ArrowDownUp size={14} strokeWidth={2} />,
-        label: t('Network'),
-      },
-      events: {
-        icon: <ListCollapse size={14} strokeWidth={2} />,
-        label: t('Events'),
-      },
-      state: {
-        icon: getStorageName(storageType).icon,
-        label: getStorageName(storageType).name,
-      },
-      graphql: {
-        icon: <Merge size={14} strokeWidth={2} />,
-        label: 'Graphql',
-      }
-    }
-    // @ts-ignore
-    const getLabel = (block: string) => labels[block][showIcons ? 'icon' : 'label']
     return (
       <>
         {isSaas ? <SummaryButton onClick={showSummary} /> : null}
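The thirty deleted lines above removed the icon-or-text lookup added for export mode: when activeTab === 'EXPORT', each devtools button rendered a small icon instead of its translated label. A reduced, dependency-free sketch of that removed pattern (icons are plain strings here; the real table held antd and lucide-react elements, and only two of its seven entries are reproduced):

// Reduced sketch of the deleted lookup table: each block maps to an icon
// and a label, and getLabel picks one depending on showIcons.
const labels = {
  console: { icon: '<CodeOutlined />', label: 'Console' },
  network: { icon: '<ArrowDownUp />', label: 'Network' },
} as const;

const makeGetLabel =
  (showIcons: boolean) =>
  (block: keyof typeof labels) =>
    labels[block][showIcons ? 'icon' : 'label'];

const getLabel = makeGetLabel(true); // true stands in for activeTab === 'EXPORT'
console.log(getLabel('console')); // "<CodeOutlined />"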
@@ -309,7 +274,6 @@ const DevtoolsButtons = observer(
                 </div>
               </div>
             }
-            customKey="xray"
             label="X-Ray"
             onClick={() => toggleBottomTools(OVERVIEW)}
             active={bottomBlock === OVERVIEW && !inspectorMode}
@@ -322,11 +286,10 @@ const DevtoolsButtons = observer(
                 <div>{t('Launch Console')}</div>
               </div>
             }
-            customKey="console"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(CONSOLE)}
             active={bottomBlock === CONSOLE && !inspectorMode}
-            label={getLabel('console')}
+            label={t('Console')}
             hasErrors={logRedCount > 0 || showExceptions}
           />

@@ -337,11 +300,10 @@ const DevtoolsButtons = observer(
                 <div>{t('Launch Network')}</div>
               </div>
             }
-            customKey="network"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(NETWORK)}
             active={bottomBlock === NETWORK && !inspectorMode}
-            label={getLabel('network')}
+            label={t('Network')}
             hasErrors={resourceRedCount > 0}
           />

@@ -352,11 +314,10 @@ const DevtoolsButtons = observer(
                 <div>{t('Launch Performance')}</div>
               </div>
             }
-            customKey="performance"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(PERFORMANCE)}
             active={bottomBlock === PERFORMANCE && !inspectorMode}
-            label={getLabel('performance')}
+            label="Performance"
           />

         {showGraphql && (
@@ -364,8 +325,7 @@ const DevtoolsButtons = observer(
             disabled={disableButtons}
             onClick={() => toggleBottomTools(GRAPHQL)}
             active={bottomBlock === GRAPHQL && !inspectorMode}
-            label={getLabel('graphql')}
+            label="Graphql"
-            customKey="graphql"
           />
         )}

@@ -377,11 +337,10 @@ const DevtoolsButtons = observer(
                 <div>{t('Launch State')}</div>
               </div>
             }
-            customKey="state"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(STORAGE)}
             active={bottomBlock === STORAGE && !inspectorMode}
-            label={getLabel('state')}
+            label={getStorageName(storageType) as string}
           />
         )}
         <ControlButton
@@ -391,16 +350,14 @@ const DevtoolsButtons = observer(
                 <div>{t('Launch Events')}</div>
               </div>
             }
-            customKey="events"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(STACKEVENTS)}
             active={bottomBlock === STACKEVENTS && !inspectorMode}
-            label={getLabel('events')}
+            label={t('Events')}
             hasErrors={stackRedCount > 0}
           />
         {showProfiler && (
           <ControlButton
-            customKey="profiler"
             disabled={disableButtons}
             onClick={() => toggleBottomTools(PROFILER)}
             active={bottomBlock === PROFILER && !inspectorMode}
@@ -411,7 +368,6 @@ const DevtoolsButtons = observer(
           <LogsButton
             integrated={integratedServices.map((service) => service.name)}
             onClick={() => toggleBottomTools(BACKENDLOGS)}
-            shorten={showIcons}
           />
         ) : null}
         {possibleAudio.length ? (
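A side effect of dropping customKey across these buttons is that the DOM ids now derive from translated labels such as t('Console'), so they can vary with the active locale. A small sketch of that interaction (the translator below is a stand-in, not the app's i18n setup; the id template is the one from controlButton):

// Stand-in translator; the real app resolves t() through its i18n layer.
const t = (key: string, locale: 'en' | 'de' = 'en') =>
  locale === 'de' && key === 'Console' ? 'Konsole' : key;

const controlId = (label: string) => `control-button-${label.toLowerCase()}`;

console.log(controlId(t('Console')));       // "control-button-console"
console.log(controlId(t('Console', 'de'))); // "control-button-konsole"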
@@ -6,11 +6,9 @@ import {
 import { observer } from 'mobx-react-lite';
 import stl from './timeline.module.css';
 import { getTimelinePosition } from './getTimelinePosition';
-import { useStore } from '@/mstore';

 function EventsList() {
   const { store } = useContext(PlayerContext);
-  const { uiPlayerStore } = useStore();

   const { eventCount, endTime } = store.get();
   const { tabStates } = store.get();
@@ -19,6 +17,7 @@ function EventsList() {
     () => Object.values(tabStates)[0]?.eventList.filter((e) => e.time) || [],
     [eventCount],
   );
+
   React.useEffect(() => {
     const hasDuplicates = events.some(
       (e, i) =>
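The final hunk is cut off mid-expression in this view, so only the opening of the duplicate scan over events survives. Purely as a hypothetical illustration of such a check, with an assumed predicate that is not the file's actual code:

type PlayerEvent = { time: number };

// Hypothetical: treat two adjacent events sharing a timestamp as duplicates.
const hasDuplicates = (events: PlayerEvent[]): boolean =>
  events.some((e, i) => i > 0 && events[i - 1].time === e.time);

console.log(hasDuplicates([{ time: 1 }, { time: 1 }])); // true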
Some files were not shown because too many files have changed in this diff.