Compare commits

..

7 commits

Author SHA1 Message Date
Alexander
58ac2a04ae feat(assist): tried to fix uWS.HttpResponse.onAborted exception 2025-03-18 13:51:42 +01:00
Alexander
c68bd2b859 feat(assist): tried to implement a blocking queue for a heavy fetchSockets request 2025-03-18 11:50:23 +01:00
Andrey Babushkin
7365d8639c
updated widget link (#3158)
* updated widget link

* fix calls

* updated widget url
2025-03-18 11:07:09 +01:00
nick-delirium
4c967d4bc1
ui: update tracker import examples 2025-03-17 13:42:34 +01:00
Alexander
3fdf799bd7 feat(http): unsupported tracker error with projectID in logs 2025-03-17 13:32:00 +01:00
nick-delirium
9aca716e6b
tracker: 16.0.2 fix str dictionary keys 2025-03-17 11:25:54 +01:00
Shekar Siri
cf9ecdc9a4 refactor(searchStore): reformat filterMap function parameters
- Reformat the parameters of the filterMap function for better readability.
- Comment out the fetchSessions call in clearSearch method to avoid unnecessary session fetch.
2025-03-14 19:47:42 +01:00
218 changed files with 3049 additions and 5286 deletions

View file

@ -1,122 +0,0 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: "Skip Security checks if there is an unfixable vuln or error. Value: true/false"
required: false
default: "false"
push:
branches:
- dev
paths:
- "ee/assist-server/**"
name: Build and Deploy Assist-Server EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- uses: ./.github/composite-actions/update-keys
with:
assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
assist_key: ${{ secrets.ASSIST_KEY }}
domain_name: ${{ secrets.EE_DOMAIN_NAME }}
jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
jwt_secret: ${{ secrets.EE_JWT_SECRET }}
jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
license_key: ${{ secrets.EE_LICENSE_KEY }}
minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
pg_password: ${{ secrets.EE_PG_PASSWORD }}
registry_url: ${{ secrets.OSS_REGISTRY_URL }}
name: Update Keys
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pushing Assist-Server image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd assist-server
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("assist-server")
for image in ${images[*]};do
./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("assist-server")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We've to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
pwd
cd scripts/helmcharts/
# Update changed image tag
sed -i "/assist-server/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mkdir -p /tmp/charts
mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
rm -rf openreplay/charts/*
mv /tmp/charts/* openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

View file

@ -1,189 +0,0 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
on:
workflow_dispatch:
inputs:
services:
description: 'Comma separated names of services to build (in small letters).'
required: true
default: 'chalice,frontend'
tag:
description: 'Tag to update.'
required: true
type: string
branch:
description: 'Branch to build patches from. Make sure the branch is up to date with the tag. Else it will cause missing commits.'
required: true
type: string
name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
jobs:
deploy:
name: Build Patch from old tag
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 4
ref: ${{ github.event.inputs.tag }}
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Create backup tag with timestamp
run: |
set -e # Exit immediately if a command exits with a non-zero status
TIMESTAMP=$(date +%Y%m%d%H%M%S)
BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
echo "Created backup tag: $BACKUP_TAG"
# Get the oldest commit date from the last 3 commits in raw format
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
# Add 1 second to the timestamp
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
- name: Setup yq
uses: mikefarah/yq@master
# Configure AWS credentials for the first registry
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
id: login-ecr-arm
run: |
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
- name: Build
id: build-image
env:
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout $INPUT_TAG
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
done
- name: Change commit timestamp
run: |
# Convert the timestamp to a date format git can understand
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
echo "Setting commit date to: $NEW_DATE"
# Amend the commit with the new date
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
# Verify the change
git log -1 --pretty=format:"Commit now dated: %cD"
# git tag and push
git tag $INPUT_TAG -f
git push origin $INPUT_TAG -f
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
# MSAAS_REPO_FOLDER: /tmp/msaas
# with:
# limit-access-to-actor: true

View file

@ -2,6 +2,7 @@
on:
workflow_dispatch:
description: 'This workflow will build patches for the latest tag, and will always use the commit from the main branch.'
inputs:
services:
description: 'Comma separated names of services to build(in small letters).'
@ -19,20 +20,12 @@ jobs:
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 1
- name: Rebase with main branch, to make sure the code has latest main changes
if: github.ref != 'refs/heads/main'
run: |
git remote -v
git config --global user.email "action@github.com"
git config --global user.name "GitHub Action"
git config --global rebase.autoStash true
git fetch origin main:main
git rebase main
git log -3
git pull --rebase origin main
- name: Downloading yq
run: |
@ -55,8 +48,6 @@ jobs:
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
- uses: depot/setup-action@v1
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
@ -74,168 +65,78 @@ jobs:
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
SERVICES_INPUT: ${{ github.event.inputs.services }}
run: |
#!/bin/bash
set -euo pipefail
# Configuration
readonly WORKING_DIR=$(pwd)
readonly BUILD_SCRIPT_NAME="build.sh"
readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
# Initialize git configuration
setup_git() {
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b "$BRANCH_NAME"
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
# Get and increment image version
image_version() {
local service=$1
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
local current_version new_version
current_version=$(yq eval '.AppVersion' "$chart_path")
new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
echo "$new_version"
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
git log -1
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
# Clone MSAAS repository if not exists
clone_msaas() {
if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
cd "$MSAAS_REPO_FOLDER"
cd openreplay && git fetch origin && git checkout main
git log -1
cd "$MSAAS_REPO_FOLDER"
bash git-init.sh
git checkout
fi
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Build managed services
build_managed() {
local service=$1
local version=$2
echo "Building managed service: $service"
clone_msaas
if [[ $service == 'chalice' ]]; then
cd "$MSAAS_REPO_FOLDER/openreplay/api"
else
cd "$MSAAS_REPO_FOLDER/openreplay/$service"
fi
local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
echo "Executing: $build_cmd"
if ! eval "$build_cmd" 2>&1; then
echo "Build failed for $service"
exit 1
fi
}
# Build service with given arguments
build_service() {
local service=$1
local version=$2
local build_args=$3
local build_script=${4:-$BUILD_SCRIPT_NAME}
local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
echo "Executing: $command"
eval "$command"
}
# Update chart version and commit changes
update_chart_version() {
local service=$1
local version=$2
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
# Ensure we're in the original working directory/repository
cd "$WORKING_DIR"
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
git add "$chart_path"
git commit -m "Increment $service chart version to $version"
git push --set-upstream origin "$BRANCH_NAME"
cd -
}
# Main execution
main() {
setup_git
# Get backend services list
ls backend/cmd >"$BACKEND_SERVICES_FILE"
# Parse services input (fix for GitHub Actions syntax)
echo "Services: ${SERVICES_INPUT:-$1}"
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
# Process each service
for service in "${services[@]}"; do
echo "Processing service: $service"
cd "$WORKING_DIR"
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
# Determine build configuration based on service type
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
# Backend service
cd backend
foss_build_args="nil $service"
ee_build_args="ee $service"
else
# Non-backend service
case "$service" in
chalice | alerts | crons)
cd "$WORKING_DIR/api"
;;
*)
cd "$service"
;;
esac
# Special build scripts for alerts/crons
if [[ $service == 'alerts' || $service == 'crons' ]]; then
build_script="build_${service}.sh"
fi
ee_build_args="ee"
fi
# Get version and build
local version
version=$(image_version "$service")
# Build FOSS and EE versions
build_service "$service" "$version" "$foss_build_args"
build_service "$service" "${version}-ee" "$ee_build_args"
# Build managed version for specific services
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
echo "Nothing to build in managed for service $service"
else
build_managed "$service" "$version"
fi
# Update chart and commit
update_chart_version "$service" "$version"
done
cd "$WORKING_DIR"
# Cleanup
rm -f "$BACKEND_SERVICES_FILE"
}
echo "Working directory: $WORKING_DIR"
# Run main function with all arguments
main "$SERVICES_INPUT"
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
BUILD_SCRIPT_NAME="build.sh"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
git push --set-upstream origin $BRANCH_NAME
done
- name: Create Pull Request
uses: repo-sync/pull-request@v2
@ -246,7 +147,8 @@ jobs:
pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
pr_body: |
This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
Once this PR is merged, tag update job will run automatically.
Once this PR is merged, To update the latest tag, run the following workflow.
https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml
# - name: Debug Job
# if: ${{ failure() }}

View file

@ -1,42 +1,35 @@
on:
pull_request:
types: [closed]
branches:
- main
name: Release tag update --force
workflow_dispatch:
description: "This workflow will build for patches for latest tag, and will Always use commit from main branch."
inputs:
services:
description: "This action will update the latest tag with the current main branch HEAD. Should I proceed? true/false"
required: true
default: "false"
name: Force Push tag with main branch HEAD
jobs:
deploy:
name: Build Patch from main
runs-on: ubuntu-latest
if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Get latest release tag using GitHub API
id: get-latest-tag
run: |
LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
"https://api.github.com/repos/${{ github.repository }}/releases/latest" \
| jq -r .tag_name)
# Fallback to git command if API doesn't return a tag
if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
echo "Not found latest tag"
exit 100
fi
echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
echo "Latest tag: $LATEST_TAG"
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Push main branch to tag
run: |
git fetch --tags
git checkout main
echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# with:
# limit-access-to-actor: true

View file

@ -85,8 +85,7 @@ def __generic_query(typename, value_length=None):
ORDER BY value"""
if value_length is None or value_length > 2:
return f"""SELECT DISTINCT ON(value,type) value, type
((SELECT DISTINCT value, type
return f"""(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
@ -102,7 +101,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)) AS raw;"""
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@ -327,7 +326,7 @@ def __search_metadata(project_id, value, key=None, source=None):
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify(f"""\
SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}))

View file

@ -338,14 +338,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
SELECT details.error_id as error_id,
name, message, users, total,
sessions, last_occurrence, first_occurrence, chart
FROM (SELECT error_id,
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
JSONExtractString(toString(`$properties`), 'name') AS name,
JSONExtractString(toString(`$properties`), 'message') AS message,
COUNT(DISTINCT user_id) AS users,
COUNT(DISTINCT events.session_id) AS sessions,
MAX(created_at) AS max_datetime,
MIN(created_at) AS min_datetime,
COUNT(DISTINCT error_id)
COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id'))
OVER() AS total
FROM {MAIN_EVENTS_TABLE} AS events
INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
@ -357,7 +357,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id,
INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
FROM {MAIN_EVENTS_TABLE}
@ -366,7 +366,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id,
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
gs.generate_series AS timestamp,
COUNT(DISTINCT session_id) AS count
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs

View file

@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
cur.execute(
cur.mogrify(
"""SELECT username, token, url
FROM public.jira_cloud
WHERE user_id = %(user_id)s;""",
FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",
{"user_id": self._user_id})
)
data = helper.dict_to_camel_case(cur.fetchone())
@ -95,9 +95,10 @@ class JIRAIntegration(base.BaseIntegration):
def add(self, username, token, url, obfuscate=False):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(""" \
INSERT INTO public.jira_cloud(username, token, user_id, url)
VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
cur.mogrify("""\
INSERT INTO public.jira_cloud(username, token, user_id,url)
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
RETURNING username, token, url;""",
{"user_id": self._user_id, "username": username,
"token": token, "url": url})
)
@ -111,10 +112,9 @@ class JIRAIntegration(base.BaseIntegration):
def delete(self):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(""" \
DELETE
FROM public.jira_cloud
WHERE user_id = %(user_id)s;""",
cur.mogrify("""\
DELETE FROM public.jira_cloud
WHERE user_id=%(user_id)s;""",
{"user_id": self._user_id})
)
return {"state": "success"}
@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
changes={
"username": data.username,
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
else self.integration["token"],
else self.integration.token,
"url": str(data.url)
},
obfuscate=True

View file

@ -85,9 +85,6 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
# compute avg_time_from_previous at the same level as sessions_count (this was removed in v1.22)
# if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
if not data.hide_excess:
data.hide_excess = True
data.rows = 50
sub_events = []
start_points_conditions = []
step_0_conditions = []

View file

@ -153,7 +153,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
"filters": []
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -178,7 +178,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
"filters": []
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -1108,12 +1108,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
is_negative_operator = sh.is_negation_operator(f.operator)
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
r_op = ""
if is_negative_operator:
r_op = sh.reverse_sql_operator(op)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL:
@ -1122,12 +1118,6 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(json_condition(
"main", "$properties", 'status', op, f.value, e_k_f, True, True
@ -1140,13 +1130,6 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,
@ -1159,26 +1142,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(json_condition(
"main", "$properties", 'response_body', op, f.value, e_k_f
))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
if is_negative_operator:
events_conditions_not.append(
{
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = sh.multi_conditions(
f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
value_key=e_k_f)
else:
logging.warning(f"undefined FETCH filter: {f.type}")
if not apply:
@ -1426,30 +1395,17 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = []
for i, c in enumerate(extra_conditions):
if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
if sh.isAny_opreator(c.operator):
continue
e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**sh.multi_values(c.value, value_key=e_k)}
if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
if c.type == events.EventType.LOCATION.ui_type:
_extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
elif c.type == schemas.EventType.REQUEST_DETAILS.value:
for j, c_f in enumerate(c.filters):
if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
continue
e_k += f"_{j}"
op = sh.get_sql_operator(c_f.operator)
c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
full_args = {**full_args,
**sh.multi_values(c_f.value, value_key=e_k)}
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
_extra_or_condition.append(
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c_f.value, value_key=e_k))
else:
logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:

View file

@ -148,7 +148,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
"filters": []
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -165,7 +165,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": e.filters
"filters": []
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
c.value, value_key=e_k))
else:
logger.warning(f"unsupported extra_event type: {c.type}")
logger.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
query_part = f"""\

View file

@ -122,10 +122,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
sort = 'session_id'
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
if data.sort == 'datetime':
sort = 'start_ts'
else:
sort = helper.key_to_snake_case(data.sort)
sort = helper.key_to_snake_case(data.sort)
meta_keys = metadata.get(project_id=project.project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,

View file

@ -34,10 +34,7 @@ if config("CH_COMPRESSION", cast=bool, default=True):
def transform_result(self, original_function):
@wraps(original_function)
def wrapper(*args, **kwargs):
if kwargs.get("parameters"):
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
elif len(args) > 0:
logger.debug(str.encode(args[0]))
logger.debug(str.encode(self.format(query=kwargs.get("query", ""), parameters=kwargs.get("parameters"))))
result = original_function(*args, **kwargs)
if isinstance(result, clickhouse_connect.driver.query.QueryResult):
column_names = result.column_names
@ -149,11 +146,13 @@ class ClickHouseClient:
def __enter__(self):
return self.__client
def format(self, query, parameters=None):
if parameters:
ctx = QueryContext(query=query, parameters=parameters)
return ctx.final_query
return query
def format(self, query, *, parameters=None):
if parameters is None:
return query
return query % {
key: f"'{value}'" if isinstance(value, str) else value
for key, value in parameters.items()
}
def __exit__(self, *args):
if config('CH_POOL', cast=bool, default=True):

View file

@ -4,41 +4,37 @@ import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
if isinstance(op, Enum):
op = op.value
return {
schemas.SearchEventOperator.IS.value: "=",
schemas.SearchEventOperator.ON.value: "=",
schemas.SearchEventOperator.ON_ANY.value: "IN",
schemas.SearchEventOperator.IS_NOT.value: "!=",
schemas.SearchEventOperator.NOT_ON.value: "!=",
schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
schemas.SearchEventOperator.IS: "=",
schemas.SearchEventOperator.ON: "=",
schemas.SearchEventOperator.ON_ANY: "IN",
schemas.SearchEventOperator.IS_NOT: "!=",
schemas.SearchEventOperator.NOT_ON: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
# Selector operators:
schemas.ClickEventExtraOperator.IS.value: "=",
schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
schemas.ClickEventExtraOperator.IS: "=",
schemas.ClickEventExtraOperator.IS_NOT: "!=",
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
schemas.MathOperator.GREATER.value: ">",
schemas.MathOperator.GREATER_EQ.value: ">=",
schemas.MathOperator.LESS.value: "<",
schemas.MathOperator.LESS_EQ.value: "<=",
schemas.MathOperator.GREATER: ">",
schemas.MathOperator.GREATER_EQ: ">=",
schemas.MathOperator.LESS: "<",
schemas.MathOperator.LESS_EQ: "<=",
}.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator):
if isinstance(op, Enum):
op = op.value
return op in [schemas.SearchEventOperator.IS_NOT.value,
schemas.SearchEventOperator.NOT_ON.value,
schemas.SearchEventOperator.NOT_CONTAINS.value,
schemas.ClickEventExtraOperator.IS_NOT.value,
schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
return op in [schemas.SearchEventOperator.IS_NOT,
schemas.SearchEventOperator.NOT_ON,
schemas.SearchEventOperator.NOT_CONTAINS,
schemas.ClickEventExtraOperator.IS_NOT,
schemas.ClickEventExtraOperator.NOT_CONTAINS]
def reverse_sql_operator(op):

View file

@ -0,0 +1,591 @@
-- -- Original Q3
-- WITH ranked_events AS (SELECT *
-- FROM ranked_events_1736344377403),
-- n1 AS (SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- COUNT(1) AS sessions_count
-- FROM ranked_events
-- WHERE event_number_in_session = 1
-- AND isNotNull(next_value)
-- GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n2 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n1
-- INNER JOIN ranked_events AS re
-- ON (n1.next_value = re.e_value AND n1.next_type = re.event_type)
-- WHERE re.event_number_in_session = 2
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n3 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n2
-- INNER JOIN ranked_events AS re
-- ON (n2.next_value = re.e_value AND n2.next_type = re.event_type)
-- WHERE re.event_number_in_session = 3
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n4 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n3
-- INNER JOIN ranked_events AS re
-- ON (n3.next_value = re.e_value AND n3.next_type = re.event_type)
-- WHERE re.event_number_in_session = 4
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8),
-- n5 AS (SELECT *
-- FROM (SELECT re.event_number_in_session AS event_number_in_session,
-- re.event_type AS event_type,
-- re.e_value AS e_value,
-- re.next_type AS next_type,
-- re.next_value AS next_value,
-- COUNT(1) AS sessions_count
-- FROM n4
-- INNER JOIN ranked_events AS re
-- ON (n4.next_value = re.e_value AND n4.next_type = re.event_type)
-- WHERE re.event_number_in_session = 5
-- GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type,
-- re.next_value) AS sub_level
-- ORDER BY sessions_count DESC
-- LIMIT 8)
-- SELECT *
-- FROM (SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n1
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n2
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n3
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n4
-- UNION ALL
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- next_type,
-- next_value,
-- sessions_count
-- FROM n5) AS chart_steps
-- ORDER BY event_number_in_session;
-- Q1
-- CREATE TEMPORARY TABLE pre_ranked_events_1736344377403 AS
CREATE TABLE pre_ranked_events_1736344377403 ENGINE = Memory AS
(WITH initial_event AS (SELECT events.session_id, MIN(datetime) AS start_event_timestamp
FROM experimental.events AS events
WHERE ((event_type = 'LOCATION' AND (url_path = '/en/deployment/')))
AND events.project_id = toUInt16(65)
AND events.datetime >= toDateTime(1735599600000 / 1000)
AND events.datetime < toDateTime(1736290799999 / 1000)
GROUP BY 1),
pre_ranked_events AS (SELECT *
FROM (SELECT session_id,
event_type,
datetime,
url_path AS e_value,
row_number() OVER (PARTITION BY session_id
ORDER BY datetime ,
message_id ) AS event_number_in_session
FROM experimental.events AS events
INNER JOIN initial_event ON (events.session_id = initial_event.session_id)
WHERE events.project_id = toUInt16(65)
AND events.datetime >= toDateTime(1735599600000 / 1000)
AND events.datetime < toDateTime(1736290799999 / 1000)
AND (events.event_type = 'LOCATION')
AND events.datetime >= initial_event.start_event_timestamp
) AS full_ranked_events
WHERE event_number_in_session <= 5)
SELECT *
FROM pre_ranked_events);
;
SELECT *
FROM pre_ranked_events_1736344377403
WHERE event_number_in_session < 3;
-- ---------Q2-----------
-- CREATE TEMPORARY TABLE ranked_events_1736344377403 AS
DROP TABLE ranked_events_1736344377403;
CREATE TABLE ranked_events_1736344377403 ENGINE = Memory AS
(WITH pre_ranked_events AS (SELECT *
FROM pre_ranked_events_1736344377403),
start_points AS (SELECT DISTINCT session_id
FROM pre_ranked_events
WHERE ((event_type = 'LOCATION' AND (e_value = '/en/deployment/')))
AND pre_ranked_events.event_number_in_session = 1),
ranked_events AS (SELECT pre_ranked_events.*,
leadInFrame(e_value)
OVER (PARTITION BY session_id ORDER BY datetime
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value,
leadInFrame(toNullable(event_type))
OVER (PARTITION BY session_id ORDER BY datetime
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type
FROM start_points
INNER JOIN pre_ranked_events USING (session_id))
SELECT *
FROM ranked_events);
-- ranked events
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
-- AND e_value='/en/deployment/deploy-docker/'
-- AND next_value NOT IN ('/en/deployment/','/en/plugins/','/en/using-or/')
-- AND e_value NOT IN ('/en/deployment/deploy-docker/','/en/getting-started/','/en/deployment/deploy-ubuntu/')
AND isNotNull(next_value)
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY event_number_in_session, sessions_count DESC;
SELECT event_number_in_session,
event_type,
e_value,
COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value
ORDER BY event_number_in_session, sessions_count DESC;
SELECT COUNT(1) AS sessions_count
FROM ranked_events_1736344377403
WHERE event_number_in_session = 2
AND isNull(next_value)
;
-- ---------Q3 MORE -----------
WITH ranked_events AS (SELECT *
FROM ranked_events_1736344377403),
n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n2 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 2
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n3 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 3
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
drop_n AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n1
WHERE isNull(n1.next_type)
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n2
WHERE isNull(n2.next_type)),
-- TODO: make this as top_steps, where every step will go to next as top/others
top_n1 AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n1
WHERE isNotNull(next_type)
ORDER BY sessions_count DESC
LIMIT 3),
top_n2 AS (-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n2
WHERE (event_type, e_value) IN (SELECT event_type,
e_value
FROM n2
WHERE isNotNull(next_type)
GROUP BY event_type, e_value
ORDER BY SUM(sessions_count) DESC
LIMIT 3)
ORDER BY sessions_count DESC),
top_n AS (SELECT *
FROM top_n1
UNION ALL
SELECT *
FROM top_n2),
u_top_n AS (SELECT DISTINCT event_number_in_session,
event_type,
e_value
FROM top_n),
others_n AS (
-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n1
WHERE isNotNull(next_type)
ORDER BY sessions_count DESC
LIMIT 1000000 OFFSET 3
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count
FROM n2
WHERE isNotNull(next_type)
-- GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 1000000 OFFSET 3)
SELECT *
FROM (
-- Top
SELECT *
FROM top_n
-- UNION ALL
-- -- Others
-- SELECT event_number_in_session,
-- event_type,
-- e_value,
-- 'OTHER' AS next_type,
-- NULL AS next_value,
-- SUM(sessions_count)
-- FROM others_n
-- GROUP BY event_number_in_session, event_type, e_value
-- UNION ALL
-- -- Top go to Drop
-- SELECT drop_n.event_number_in_session,
-- drop_n.event_type,
-- drop_n.e_value,
-- drop_n.next_type,
-- drop_n.next_value,
-- drop_n.sessions_count
-- FROM drop_n
-- INNER JOIN u_top_n ON (drop_n.event_number_in_session = u_top_n.event_number_in_session
-- AND drop_n.event_type = u_top_n.event_type
-- AND drop_n.e_value = u_top_n.e_value)
-- ORDER BY drop_n.event_number_in_session
-- -- -- UNION ALL
-- -- -- Top go to Others
-- SELECT top_n.event_number_in_session,
-- top_n.event_type,
-- top_n.e_value,
-- 'OTHER' AS next_type,
-- NULL AS next_value,
-- SUM(top_n.sessions_count) AS sessions_count
-- FROM top_n
-- LEFT JOIN others_n ON (others_n.event_number_in_session = (top_n.event_number_in_session + 1)
-- AND top_n.next_type = others_n.event_type
-- AND top_n.next_value = others_n.e_value)
-- WHERE others_n.event_number_in_session IS NULL
-- AND top_n.next_type IS NOT NULL
-- GROUP BY event_number_in_session, event_type, e_value
-- UNION ALL
-- -- Others got to Top
-- SELECT others_n.event_number_in_session,
-- 'OTHER' AS event_type,
-- NULL AS e_value,
-- others_n.s_next_type AS next_type,
-- others_n.s_next_value AS next_value,
-- SUM(sessions_count) AS sessions_count
-- FROM others_n
-- INNER JOIN top_n ON (others_n.event_number_in_session = top_n.event_number_in_session + 1 AND
-- others_n.s_next_type = top_n.event_type AND
-- others_n.s_next_value = top_n.event_type)
-- GROUP BY others_n.event_number_in_session, next_type, next_value
-- UNION ALL
-- -- TODO: find if this works or not
-- -- Others got to Others
-- SELECT others_n.event_number_in_session,
-- 'OTHER' AS event_type,
-- NULL AS e_value,
-- 'OTHERS' AS next_type,
-- NULL AS next_value,
-- SUM(sessions_count) AS sessions_count
-- FROM others_n
-- LEFT JOIN u_top_n ON ((others_n.event_number_in_session + 1) = u_top_n.event_number_in_session
-- AND others_n.s_next_type = u_top_n.event_type
-- AND others_n.s_next_value = u_top_n.e_value)
-- WHERE u_top_n.event_number_in_session IS NULL
-- GROUP BY others_n.event_number_in_session
)
ORDER BY event_number_in_session;
-- ---------Q3 TOP ON VALUE ONLY -----------
WITH ranked_events AS (SELECT *
FROM ranked_events_1736344377403),
n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 1
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n2 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 2
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
n3 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
COUNT(1) AS sessions_count
FROM ranked_events
WHERE event_number_in_session = 3
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC),
drop_n AS (-- STEP 1
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n1
WHERE isNull(n1.next_type)
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
'DROP' AS next_type,
NULL AS next_value,
sessions_count
FROM n2
WHERE isNull(n2.next_type)),
top_n AS (SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n1
GROUP BY event_number_in_session, event_type, e_value
LIMIT 1
UNION ALL
-- STEP 2
SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n2
GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 3
UNION ALL
-- STEP 3
SELECT event_number_in_session,
event_type,
e_value,
SUM(sessions_count) AS sessions_count
FROM n3
GROUP BY event_number_in_session, event_type, e_value
ORDER BY sessions_count DESC
LIMIT 3),
top_n_with_next AS (SELECT n1.*
FROM n1
UNION ALL
SELECT n2.*
FROM n2
INNER JOIN top_n ON (n2.event_number_in_session = top_n.event_number_in_session
AND n2.event_type = top_n.event_type
AND n2.e_value = top_n.e_value)),
others_n AS (
-- STEP 2
SELECT n2.*
FROM n2
WHERE (n2.event_number_in_session, n2.event_type, n2.e_value) NOT IN
(SELECT event_number_in_session, event_type, e_value
FROM top_n
WHERE top_n.event_number_in_session = 2)
UNION ALL
-- STEP 3
SELECT n3.*
FROM n3
WHERE (n3.event_number_in_session, n3.event_type, n3.e_value) NOT IN
(SELECT event_number_in_session, event_type, e_value
FROM top_n
WHERE top_n.event_number_in_session = 3))
SELECT *
FROM (
-- SELECT sum(top_n_with_next.sessions_count)
-- FROM top_n_with_next
-- WHERE event_number_in_session = 1
-- -- AND isNotNull(next_value)
-- AND (next_type, next_value) IN
-- (SELECT others_n.event_type, others_n.e_value FROM others_n WHERE others_n.event_number_in_session = 2)
-- -- SELECT * FROM others_n
-- -- SELECT * FROM n2
-- SELECT *
-- FROM top_n
-- );
-- Top to Top: valid
SELECT top_n_with_next.*
FROM top_n_with_next
INNER JOIN top_n
ON (top_n_with_next.event_number_in_session + 1 = top_n.event_number_in_session
AND top_n_with_next.next_type = top_n.event_type
AND top_n_with_next.next_value = top_n.e_value)
UNION ALL
-- Top to Others: valid
SELECT top_n_with_next.event_number_in_session,
top_n_with_next.event_type,
top_n_with_next.e_value,
'OTHER' AS next_type,
NULL AS next_value,
SUM(top_n_with_next.sessions_count) AS sessions_count
FROM top_n_with_next
WHERE (top_n_with_next.event_number_in_session + 1, top_n_with_next.next_type, top_n_with_next.next_value) IN
(SELECT others_n.event_number_in_session, others_n.event_type, others_n.e_value FROM others_n)
GROUP BY top_n_with_next.event_number_in_session, top_n_with_next.event_type, top_n_with_next.e_value
UNION ALL
-- Top go to Drop: valid
SELECT drop_n.event_number_in_session,
drop_n.event_type,
drop_n.e_value,
drop_n.next_type,
drop_n.next_value,
drop_n.sessions_count
FROM drop_n
INNER JOIN top_n ON (drop_n.event_number_in_session = top_n.event_number_in_session
AND drop_n.event_type = top_n.event_type
AND drop_n.e_value = top_n.e_value)
ORDER BY drop_n.event_number_in_session
UNION ALL
-- Others got to Drop: valid
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
'DROP' AS next_type,
NULL AS next_value,
SUM(others_n.sessions_count) AS sessions_count
FROM others_n
WHERE isNull(others_n.next_type)
AND others_n.event_number_in_session < 3
GROUP BY others_n.event_number_in_session, next_type, next_value
UNION ALL
-- Others got to Top:valid
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
others_n.next_type,
others_n.next_value,
SUM(others_n.sessions_count) AS sessions_count
FROM others_n
WHERE isNotNull(others_n.next_type)
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) IN
(SELECT top_n.event_number_in_session, top_n.event_type, top_n.e_value FROM top_n)
GROUP BY others_n.event_number_in_session, others_n.next_type, others_n.next_value
UNION ALL
-- Others got to Others
SELECT others_n.event_number_in_session,
'OTHER' AS event_type,
NULL AS e_value,
'OTHERS' AS next_type,
NULL AS next_value,
SUM(sessions_count) AS sessions_count
FROM others_n
WHERE isNotNull(others_n.next_type)
AND others_n.event_number_in_session < 3
AND (others_n.event_number_in_session + 1, others_n.next_type, others_n.next_value) NOT IN
(SELECT event_number_in_session, event_type, e_value FROM top_n)
GROUP BY others_n.event_number_in_session)
ORDER BY event_number_in_session, sessions_count
DESC;

View file

@ -960,6 +960,36 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
return self
# We don't need this as the UI is expecting filters to override the full series' filters
# @model_validator(mode="after")
# def __merge_out_filters_with_series(self):
# for f in self.filters:
# for s in self.series:
# found = False
#
# if f.is_event:
# sub = s.filter.events
# else:
# sub = s.filter.filters
#
# for e in sub:
# if f.type == e.type and f.operator == e.operator:
# found = True
# if f.is_event:
# # If extra event: append value
# for v in f.value:
# if v not in e.value:
# e.value.append(v)
# else:
# # If extra filter: override value
# e.value = f.value
# if not found:
# sub.append(f)
#
# self.filters = []
#
# return self
# UI is expecting filters to override the full series' filters
@model_validator(mode="after")
def __override_series_filters_with_outer_filters(self):
@ -1030,16 +1060,6 @@ class CardTable(__CardSchema):
values["metricValue"] = []
return values
@model_validator(mode="after")
def __enforce_AND_operator(self):
self.metric_of = MetricOfTable(self.metric_of)
if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
for s in self.series:
if s.filter is not None:
s.filter.events_order = SearchEventOrder.AND
return self
@model_validator(mode="after")
def __transform(self):
self.metric_of = MetricOfTable(self.metric_of)
@ -1115,7 +1135,7 @@ class CardPathAnalysis(__CardSchema):
view_type: MetricOtherViewType = Field(...)
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default_factory=list)
density: int = Field(default=4, ge=2, le=10)
rows: int = Field(default=5, ge=1, le=10)
rows: int = Field(default=3, ge=1, le=10)
start_type: Literal["start", "end"] = Field(default="start")
start_point: List[PathAnalysisSubFilterSchema] = Field(default_factory=list)

View file

@ -19,16 +19,14 @@ const EVENTS_DEFINITION = {
}
};
EVENTS_DEFINITION.emit = {
NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED",
AGENTS_INFO_CONNECTED: "AGENTS_INFO_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT,
WEBRTC_CONFIG: "WEBRTC_CONFIG",
NEW_AGENT: "NEW_AGENT",
NO_AGENTS: "NO_AGENT",
AGENT_DISCONNECT: "AGENT_DISCONNECTED",
AGENTS_CONNECTED: "AGENTS_CONNECTED",
NO_SESSIONS: "SESSION_DISCONNECTED",
SESSION_ALREADY_CONNECTED: "SESSION_ALREADY_CONNECTED",
SESSION_RECONNECTED: "SESSION_RECONNECTED",
UPDATE_EVENT: EVENTS_DEFINITION.listen.UPDATE_EVENT
};
const BASE_sessionInfo = {

View file

@ -42,7 +42,7 @@ const findSessionSocketId = async (io, roomId, tabId) => {
};
async function getRoomData(io, roomID) {
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [], config = null, agentInfos = [];
let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
const connected_sockets = await io.in(roomID).fetchSockets();
if (connected_sockets.length > 0) {
for (let socket of connected_sockets) {
@ -52,19 +52,13 @@ async function getRoomData(io, roomID) {
} else {
agentsCount++;
agentIDs.push(socket.id);
agentInfos.push({ ...socket.handshake.query.agentInfo, socketId: socket.id });
if (socket.handshake.query.config !== undefined) {
config = socket.handshake.query.config;
}
}
}
} else {
tabsCount = -1;
agentsCount = -1;
agentInfos = [];
agentIDs = [];
}
return {tabsCount, agentsCount, tabIDs, agentIDs, config, agentInfos};
return {tabsCount, agentsCount, tabIDs, agentIDs};
}
function processNewSocket(socket) {
@ -84,7 +78,7 @@ async function onConnect(socket) {
IncreaseOnlineConnections(socket.handshake.query.identity);
const io = getServer();
const {tabsCount, agentsCount, tabIDs, agentInfos, agentIDs, config} = await getRoomData(io, socket.handshake.query.roomId);
const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);
if (socket.handshake.query.identity === IDENTITIES.session) {
// Check if session with the same tabID already connected, if so, refuse new connexion
@ -106,9 +100,7 @@ async function onConnect(socket) {
// Inform all connected agents about reconnected session
if (agentsCount > 0) {
logger.debug(`notifying new session about agent-existence`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, config);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_INFO_CONNECTED, agentInfos);
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
}
} else if (tabsCount <= 0) {
@ -126,8 +118,7 @@ async function onConnect(socket) {
// Stats
startAssist(socket, socket.handshake.query.agentID);
}
io.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.WEBRTC_CONFIG, socket.handshake.query.config);
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, { ...socket.handshake.query.agentInfo });
socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
}
// Set disconnect handler

View file

@ -2,12 +2,11 @@ package datasaver
import (
"context"
"encoding/json"
"openreplay/backend/pkg/db/types"
"openreplay/backend/internal/config/db"
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/logger"
. "openreplay/backend/pkg/messages"
queue "openreplay/backend/pkg/queue/types"
@ -51,6 +50,10 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
}
func (s *saverImpl) Handle(msg Message) {
if msg.TypeID() == MsgCustomEvent {
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
}
var (
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
session *sessions.Session
@ -66,23 +69,6 @@ func (s *saverImpl) Handle(msg Message) {
return
}
if msg.TypeID() == MsgCustomEvent {
m := msg.(*CustomEvent)
// Try to parse custom event payload to JSON and extract or_payload field
type CustomEventPayload struct {
CustomTimestamp uint64 `json:"or_timestamp"`
}
customPayload := &CustomEventPayload{}
if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
if customPayload.CustomTimestamp >= session.Timestamp {
s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
msg.Meta().Timestamp = customPayload.CustomTimestamp
s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
}
}
defer s.Handle(types.WrapCustomEvent(m))
}
if IsMobileType(msg.TypeID()) {
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
if !postgres.IsPkeyViolation(err) {

View file

@ -135,11 +135,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add tracker version to context
r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Handler's logic
if req.ProjectKey == nil {
@ -162,6 +157,13 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req
// Add projectID to context
r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID)))
// Validate tracker version
if err := validateTrackerVersion(req.TrackerVersion); err != nil {
e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
return
}
// Check if the project supports mobile sessions
if !p.IsWeb() {
e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize)

3
ee/api/.gitignore vendored
View file

@ -225,7 +225,8 @@ Pipfile.lock
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps
/chalicelib/core/sourcemaps.py
/chalicelib/core/sourcemaps_parser.py
/chalicelib/core/tags.py
/chalicelib/saml
/chalicelib/utils/__init__.py

View file

@ -86,8 +86,7 @@ def __generic_query(typename, value_length=None):
ORDER BY value"""
if value_length is None or value_length > 2:
return f"""SELECT DISTINCT ON(value, type) value, type
FROM ((SELECT DISTINCT value, type
return f"""(SELECT DISTINCT value, type
FROM {TABLE}
WHERE
project_id = %(project_id)s
@ -103,7 +102,7 @@ def __generic_query(typename, value_length=None):
AND type='{typename.upper()}'
AND value ILIKE %(value)s
ORDER BY value
LIMIT 5)) AS raw;"""
LIMIT 5);"""
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@ -258,7 +257,7 @@ def __search_metadata(project_id, value, key=None, source=None):
WHERE project_id = %(project_id)s
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
with ch_client.ClickHouseClient() as cur:
query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)})

View file

@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data):
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False)
ch_basic_query.append("error_id = %(error_id)s")
ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s")
with ch_client.ClickHouseClient() as ch:
data["startDate24"] = TimeUTC.now(-1)
@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data):
"error_id": error_id}
main_ch_query = f"""\
WITH pre_processed AS (SELECT error_id,
WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id,
toString(`$properties`.name) AS name,
toString(`$properties`.message) AS message,
session_id,
@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000)
AND error_id = %(error_id)s
AND toString(`$properties`.error_id) = %(error_id)s
GROUP BY timestamp
ORDER BY timestamp) AS chart_details
) AS chart_details24 ON TRUE
@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data):
AND `$event_name` = 'ERROR'
AND events.created_at >= toDateTime(timestamp / 1000)
AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000)
AND error_id = %(error_id)s
AND toString(`$properties`.error_id) = %(error_id)s
GROUP BY timestamp
ORDER BY timestamp) AS chart_details
) AS chart_details30 ON TRUE;"""

View file

@ -141,7 +141,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project: schemas.
) AS users_sessions;""",
full_args)
elif ids_only:
main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id AS session_id
main_query = cur.format(query=f"""SELECT DISTINCT ON(s.session_id) s.session_id
{query_part}
ORDER BY s.session_id desc
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",

View file

@ -927,12 +927,12 @@ def authenticate_sso(email: str, internal_id: str):
aud=AUDIENCE, jwt_jti=j_r.jwt_refresh_jti),
"refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
"spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE, for_spot=True),
iat=j_r.spot_jwt_iat, aud=spot.AUDIENCE),
"spotRefreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
tenant_id=r['tenantId'],
iat=j_r.spot_jwt_refresh_iat,
aud=spot.AUDIENCE,
jwt_jti=j_r.spot_jwt_refresh_jti, for_spot=True),
jwt_jti=j_r.spot_jwt_refresh_jti),
"spotRefreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int)
}
return response

View file

@ -46,7 +46,8 @@ rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps
rm -rf ./chalicelib/core/sourcemaps.py
rm -rf ./chalicelib/core/sourcemaps_parser.py
rm -rf ./chalicelib/core/user_testing.py
rm -rf ./chalicelib/core/tags.py
rm -rf ./chalicelib/saml

View file

@ -6,12 +6,13 @@
"packages": {
"": {
"name": "assist-server",
"version": "v1.12.0-ee",
"version": "v1.22.0-ee",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"@fastify/deepmerge": "^2.0.1",
"@maxmind/geoip2-node": "^4.2.0",
"@socket.io/redis-adapter": "^8.2.1",
"async-mutex": "^0.5.0",
"express": "^4.21.1",
"jsonwebtoken": "^9.0.2",
"prom-client": "^15.0.0",
@ -202,6 +203,14 @@
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
"integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="
},
"node_modules/async-mutex": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.5.0.tgz",
"integrity": "sha512-1A94B18jkJ3DYq284ohPxoXbfTA5HsQ7/Mf4DEhcyLx3Bz27Rh59iScbB6EPiP+B+joue6YCxcMXSbFC1tZKwA==",
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/base64id": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz",
@ -1389,6 +1398,11 @@
"node": ">= 14.0.0"
}
},
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",

View file

@ -21,6 +21,7 @@
"@fastify/deepmerge": "^2.0.1",
"@maxmind/geoip2-node": "^4.2.0",
"@socket.io/redis-adapter": "^8.2.1",
"async-mutex": "^0.5.0",
"express": "^4.21.1",
"jsonwebtoken": "^9.0.2",
"prom-client": "^15.0.0",

View file

@ -22,7 +22,7 @@ const {createClient} = require("redis");
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
const pubClient = createClient({url: REDIS_URL});
const subClient = pubClient.duplicate();
logger.info(`Using Redis: ${REDIS_URL}`);
logger.info(`Using Redis in cluster-adapter: ${REDIS_URL}`);
const wsRouter = express.Router();
wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete); // autocomplete

View file

@ -1,52 +1,27 @@
const _io = require("socket.io");
const {getCompressionConfig} = require("./helper");
const {logger} = require('./logger');
const {Mutex} = require('async-mutex');
let io;
const getServer = function () {return io;}
const useRedis = process.env.redis === "true";
let inMemorySocketsCache = [];
let lastCacheUpdateTime = 0;
const CACHE_REFRESH_INTERVAL = parseInt(process.env.cacheRefreshInterval) || 5000;
const cacheExpiration = parseInt(process.env.cacheExpiration) || 10; // in seconds
const mutexTimeout = parseInt(process.env.mutexTimeout) || 10000; // in milliseconds
const fetchMutex = new Mutex();
const fetchAllSocketsResultKey = 'fetchSocketsResult';
let lastKnownResult = [];
const doFetchAllSockets = async function () {
if (useRedis) {
const now = Date.now();
logger.info(`Using in-memory cache (age: ${now - lastCacheUpdateTime}ms)`);
return inMemorySocketsCache;
} else {
try {
return await io.fetchSockets();
} catch (error) {
logger.error('Error fetching sockets:', error);
return [];
}
}
}
// Background refresher that runs independently of requests
let cacheRefresher = null;
function startCacheRefresher() {
if (cacheRefresher) clearInterval(cacheRefresher);
cacheRefresher = setInterval(async () => {
const now = Date.now();
// Only refresh if cache is stale
if (now - lastCacheUpdateTime >= CACHE_REFRESH_INTERVAL) {
logger.debug('Background refresh triggered');
try {
const startTime = performance.now();
const result = await io.fetchSockets();
inMemorySocketsCache = result;
lastCacheUpdateTime = now;
const duration = performance.now() - startTime;
logger.info(`Background refresh complete: ${duration}ms, ${result.length} sockets`);
} catch (error) {
logger.error(`Background refresh error: ${error}`);
}
}
}, CACHE_REFRESH_INTERVAL / 2);
// Cache layer
let redisClient;
if (useRedis) {
const {createClient} = require("redis");
const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
redisClient = createClient({url: REDIS_URL});
redisClient.on("error", (error) => logger.error(`Redis error : ${error}`));
void redisClient.connect();
logger.info(`Using Redis for cache: ${REDIS_URL}`);
}
const processSocketsList = function (sockets) {
@ -58,6 +33,44 @@ const processSocketsList = function (sockets) {
return res
}
const doFetchAllSockets = async function () {
if (useRedis) {
try {
let cachedResult = await redisClient.get(fetchAllSocketsResultKey);
if (cachedResult) {
return JSON.parse(cachedResult);
}
return await fetchMutex.runExclusive(async () => {
try {
cachedResult = await redisClient.get(fetchAllSocketsResultKey);
if (cachedResult) {
return JSON.parse(cachedResult);
}
let result = await io.fetchSockets();
let cachedString = JSON.stringify(processSocketsList(result));
lastKnownResult = result;
await redisClient.set(fetchAllSocketsResultKey, cachedString, {EX: cacheExpiration});
return result;
} catch (err) {
logger.error('Error fetching new sockets:', err);
return lastKnownResult;
}
}, mutexTimeout);
} catch (error) {
logger.error('Error fetching cached sockets:', error);
return lastKnownResult;
}
}
try {
let result = await io.fetchSockets();
lastKnownResult = result;
return result;
} catch (error) {
logger.error('Error fetching sockets:', error);
return lastKnownResult;
}
}
const fetchSockets = async function (roomID) {
if (!io) {
return [];
@ -96,7 +109,6 @@ const createSocketIOServer = function (server, prefix) {
});
io.attachApp(server);
}
startCacheRefresher();
return io;
}

View file

@ -1,16 +1,3 @@
SELECT 1
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
SELECT 1
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
FROM experimental.sessions) AS raw
LIMIT 1;
ALTER TABLE experimental.sessions
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
SET allow_experimental_json_type = 1;
@ -164,7 +151,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
TTL _timestamp + INTERVAL 1 MONTH ,
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested,
-- according to a specific event_name and optional properties

View file

@ -9,7 +9,8 @@ CREATE TABLE IF NOT EXISTS experimental.autocomplete
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value);
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.events
(
@ -86,7 +87,8 @@ CREATE TABLE IF NOT EXISTS experimental.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id);
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
@ -106,7 +108,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String),
user_state LowCardinality(String),
platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
datetime DateTime,
timezone LowCardinality(Nullable(String)),
duration UInt32,
@ -138,6 +140,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 3 MONTH
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
@ -149,7 +152,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
sign Int8
) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id);
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
(
@ -159,7 +163,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, session_id);
ORDER BY (project_id, user_id, session_id)
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
@ -169,7 +174,8 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id);
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.issues
(
@ -182,7 +188,8 @@ CREATE TABLE IF NOT EXISTS experimental.issues
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type);
ORDER BY (project_id, issue_id, type)
TTL _timestamp + INTERVAL 3 MONTH;
@ -285,7 +292,8 @@ CREATE TABLE IF NOT EXISTS experimental.sessions_feature_flags
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id);
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id)
TTL datetime + INTERVAL 3 MONTH;
CREATE TABLE IF NOT EXISTS experimental.ios_events
(
@ -321,7 +329,8 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id);
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 3 MONTH;
SET allow_experimental_json_type = 1;
@ -475,7 +484,8 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
ORDER BY (project_id, "$event_name", created_at, session_id)
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
TTL _timestamp + INTERVAL 1 MONTH ,
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
-- The list of events that should not be ingested,
-- according to a specific event_name and optional properties

View file

@ -1,4 +1,5 @@
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import React, { Suspense, lazy } from 'react';
import { Redirect, Route, Switch } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
@ -9,7 +10,7 @@ import { Loader } from 'UI';
import APIClient from './api_client';
import * as routes from './routes';
import { debounceCall } from '@/utils';
import { debounce } from '@/utils';
const components: any = {
SessionPure: lazy(() => import('Components/Session/Session')),
@ -87,6 +88,7 @@ const ASSIST_PATH = routes.assist();
const LIVE_SESSION_PATH = routes.liveSession();
const MULTIVIEW_PATH = routes.multiview();
const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
const ASSIST_STATS_PATH = routes.assistStats();
const USABILITY_TESTING_PATH = routes.usabilityTesting();
const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit();
@ -97,6 +99,7 @@ const SPOT_PATH = routes.spot();
const SCOPE_SETUP = routes.scopeSetup();
const HIGHLIGHTS_PATH = routes.highlights();
let debounceSearch: any = () => {};
function PrivateRoutes() {
const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
@ -121,10 +124,14 @@ function PrivateRoutes() {
}
}, [siteId]);
React.useEffect(() => {
debounceSearch = debounce(() => searchStore.fetchSessions(), 500);
}, []);
React.useEffect(() => {
if (!searchStore.urlParsed) return;
debounceCall(() => searchStore.fetchSessions(true), 250)()
}, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
debounceSearch();
}, [searchStore.instance.filters, searchStore.instance.eventsOrder]);
return (
<Suspense fallback={<Loader loading className="flex-1" />}>

View file

@ -1,7 +1,7 @@
import React, { useState, useEffect } from 'react';
import cn from 'classnames';
import Counter from 'App/components/shared/SessionItem/Counter';
import { useDraggable } from '@neodrag/react';
import Draggable from 'react-draggable';
import type { LocalStream } from 'Player';
import { PlayerContext } from 'App/components/Session/playerContext';
import ChatControls from '../ChatControls/ChatControls';
@ -25,8 +25,6 @@ function ChatWindow({
isPrestart,
}: Props) {
const { t } = useTranslation();
const dragRef = React.useRef<HTMLDivElement>(null);
useDraggable(dragRef, { bounds: 'body', defaultPosition: { x: 50, y: 200 } })
const { player } = React.useContext(PlayerContext);
const { toggleVideoLocalStream } = player.assistManager;
@ -41,7 +39,11 @@ function ChatWindow({
}, [localVideoEnabled]);
return (
<div ref={dragRef}>
<Draggable
handle=".handle"
bounds="body"
defaultPosition={{ x: 50, y: 200 }}
>
<div
className={cn(stl.wrapper, 'fixed radius bg-white shadow-xl mt-16')}
style={{ width: '280px' }}
@ -100,7 +102,7 @@ function ChatWindow({
isPrestart={isPrestart}
/>
</div>
</div>
</Draggable>
);
}

View file

@ -16,10 +16,10 @@ function ProfilerDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerProfiler from '@openreplay/tracker-profiler';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,12 @@ export const profiler = tracker.use(trackerProfiler());
const fn = profiler('call_name')(() => {
//...
}, thisArg); // thisArg is optional`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerProfiler from '@openreplay/tracker-profiler/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@ -7,17 +7,19 @@ import { useTranslation } from 'react-i18next';
function AssistNpm(props) {
const { t } = useTranslation();
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerAssist from '@openreplay/tracker-assist';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}',
});
tracker.start()
tracker.use(trackerAssist(options)); // check the list of available options below`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerAssist from '@openreplay/tracker-assist/cjs';
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${props.projectKey}'
});
const trackerAssist = tracker.use(trackerAssist(options)); // check the list of available options below

View file

@ -14,19 +14,20 @@ function GraphQLDoc() {
const projectKey = siteId
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import OpenReplay from '@openreplay/tracker';
const usage = `import { tracker } from '@openreplay/tracker';
import trackerGraphQL from '@openreplay/tracker-graphql';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
//...
export const recordGraphQL = tracker.use(trackerGraphQL());`;
const usageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const usageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerGraphQL from '@openreplay/tracker-graphql/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@ -15,20 +15,21 @@ function MobxDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const mobxUsage = `import OpenReplay from '@openreplay/tracker';
const mobxUsage = `import { tracker } from '@openreplay/tracker';
import trackerMobX from '@openreplay/tracker-mobx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below
tracker.start();
`;
const mobxUsageCjs = `import OpenReplay from '@openreplay/tracker/cjs';
const mobxUsageCjs = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerMobX from '@openreplay/tracker-mobx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.use(trackerMobX(<options>)); // check list of available options below

View file

@ -16,10 +16,10 @@ function NgRxDoc() {
: sites[0]?.projectKey;
const usage = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerNgRx from '@openreplay/tracker-ngrx';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -32,10 +32,11 @@ const metaReducers = [tracker.use(trackerNgRx(<options>))]; // check list of ava
export class AppModule {}`;
const usageCjs = `import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerNgRx from '@openreplay/tracker-ngrx/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@ -17,10 +17,10 @@ function PiniaDoc() {
? sites.find((site) => site.id === siteId)?.projectKey
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()

View file

@ -16,10 +16,10 @@ function ReduxDoc() {
: sites[0]?.projectKey;
const usage = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerRedux from '@openreplay/tracker-redux';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,11 @@ const store = createStore(
applyMiddleware(tracker.use(trackerRedux(<options>))) // check list of available options below
);`;
const usageCjs = `import { applyMiddleware, createStore } from 'redux';
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerRedux from '@openreplay/tracker-redux/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@ -16,10 +16,10 @@ function VueDoc() {
: sites[0]?.projectKey;
const usage = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
tracker.start()
@ -29,10 +29,11 @@ const store = new Vuex.Store({
plugins: [tracker.use(trackerVuex(<options>))] // check list of available options below
});`;
const usageCjs = `import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerVuex from '@openreplay/tracker-vuex/cjs';
//...
const tracker = new OpenReplay({
tracker.configure({
projectKey: '${projectKey}'
});
//...

View file

@ -16,11 +16,10 @@ function ZustandDoc(props) {
: sites[0]?.projectKey;
const usage = `import create from "zustand";
import Tracker from '@openreplay/tracker';
import { tracker } from '@openreplay/tracker';
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});
@ -43,11 +42,12 @@ const useBearStore = create(
)
`;
const usageCjs = `import create from "zustand";
import Tracker from '@openreplay/tracker/cjs';
import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
import trackerZustand, { StateLogger } from '@openreplay/tracker-zustand/cjs';
const tracker = new Tracker({
tracker.configure({
projectKey: ${projectKey},
});

View file

@ -24,7 +24,7 @@ function ModuleCard(props: Props) {
<Switch
size="small"
checked={!module.isEnabled}
title={!module.isEnabled ? 'Enabled' : 'Disabled'}
title={module.isEnabled ? 'Enabled' : 'Disabled'}
onChange={() => props.onToggle(module)}
/>
</div>

View file

@ -40,12 +40,11 @@ function Modules() {
};
useEffect(() => {
const moduleList = list(t)
moduleList.forEach((module) => {
list(t).forEach((module) => {
module.isEnabled = modules.includes(module.key);
});
setModulesState(
moduleList.filter(
list(t).filter(
(module) => !module.hidden && (!module.enterprise || isEnterprise),
),
);

View file

@ -6,7 +6,6 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying';
import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone';
import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility';
import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings';
import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode';
import DebugLog from './DebugLog';
import { useTranslation } from 'react-i18next';
@ -36,7 +35,6 @@ function SessionsListingSettings() {
<div className="flex flex-col gap-2">
<MouseTrailSettings />
<DebugLog />
<VirtualModeSettings />
</div>
</div>
</div>

View file

@ -6,7 +6,6 @@ import CardSessionsByList from 'Components/Dashboard/Widgets/CardSessionsByList'
import { useModal } from 'Components/ModalContext';
import Widget from '@/mstore/types/widget';
import { useTranslation } from 'react-i18next';
import { FilterKey } from 'Types/filter/filterType';
interface Props {
metric?: any;
@ -36,20 +35,20 @@ function SessionsBy(props: Props) {
...filtersMap[metric.metricOf],
value: [data.name],
type: filtersMap[metric.metricOf].key,
filters: [],
filters: filtersMap[metric.metricOf].filters?.map((f: any) => {
const {
key,
operatorOptions,
category,
icon,
label,
options,
...cleaned
} = f;
return { ...cleaned, type: f.key, value: [] };
}),
};
if (metric.metricOf === FilterKey.FETCH) {
baseFilter.filters = [
{
key: FilterKey.FETCH_URL,
operator: 'is',
value: [data.name],
type: FilterKey.FETCH_URL,
}
];
}
const {
key,
operatorOptions,

View file

@ -23,7 +23,6 @@ function BottomButtons({
<Button
loading={loading}
type="primary"
htmlType="submit"
disabled={loading || !instance.validate()}
id="submit-button"
>

View file

@ -43,7 +43,7 @@ function ClickMapRagePicker() {
<Checkbox onChange={onToggle} label={t('Include rage clicks')} />
<Button size="small" onClick={refreshHeatmapSession}>
{t('Get new image')}
{t('Get new session')}
</Button>
</div>
);

View file

@ -64,7 +64,6 @@ function DashboardView(props: Props) {
};
useEffect(() => {
dashboardStore.resetPeriod();
if (queryParams.has('modal')) {
onAddWidgets();
trimQuery();

View file

@ -117,6 +117,8 @@ const ListView: React.FC<Props> = ({
if (disableSelection) {
const path = withSiteId(`/metrics/${metric.metricId}`, siteId);
history.push(path);
} else {
toggleSelection?.(metric.metricId);
}
};

View file

@ -68,7 +68,7 @@ function MetricsList({
}, [metricStore]);
const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== '';
const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== 'all';
const searchImageDimensions = { width: 60, height: 'auto' };
const defaultImageDimensions = { width: 600, height: 'auto' };

View file

@ -181,10 +181,9 @@ function WidgetChart(props: Props) {
}
prevMetricRef.current = _metric;
const timestmaps = drillDownPeriod.toTimestamps();
const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
const payload = isSaved
? { ...metricParams, density }
: { ...params, ...timestmaps, ..._metric.toJson(), density };
? { ...metricParams }
: { ...params, ...timestmaps, ..._metric.toJson() };
debounceRequest(
_metric,
payload,

View file

@ -11,7 +11,6 @@ import { useTranslation } from 'react-i18next';
const initTableProps = [
{
title: <span className="font-medium">Series</span>,
_pureTitle: 'Series',
dataIndex: 'seriesName',
key: 'seriesName',
sorter: (a, b) => a.seriesName.localeCompare(b.seriesName),
@ -19,7 +18,6 @@ const initTableProps = [
},
{
title: <span className="font-medium">Avg.</span>,
_pureTitle: 'Avg.',
dataIndex: 'average',
key: 'average',
sorter: (a, b) => a.average - b.average,
@ -96,8 +94,6 @@ function WidgetDatatable(props: Props) {
tableCols.push({
title: <span className="font-medium">{name}</span>,
dataIndex: `${name}_${i}`,
// @ts-ignore
_pureTitle: name,
key: `${name}_${i}`,
sorter: (a, b) => a[`${name}_${i}`] - b[`${name}_${i}`],
});

View file

@ -55,7 +55,7 @@ function RangeGranularity({
}
const PAST_24_HR_MS = 24 * 60 * 60 * 1000;
export function calculateGranularities(periodDurationMs: number) {
function calculateGranularities(periodDurationMs: number) {
const granularities = [
{ label: 'Hourly', durationMs: 60 * 60 * 1000 },
{ label: 'Daily', durationMs: 24 * 60 * 60 * 1000 },

View file

@ -1,395 +1,376 @@
import React, {useEffect, useState} from 'react';
import {NoContent, Loader, Pagination} from 'UI';
import {Button, Tag, Tooltip, Dropdown, message} from 'antd';
import {UndoOutlined, DownOutlined} from '@ant-design/icons';
import React, { useEffect, useState } from 'react';
import { NoContent, Loader, Pagination } from 'UI';
import { Button, Tag, Tooltip, Dropdown, message } from 'antd';
import { UndoOutlined, DownOutlined } from '@ant-design/icons';
import cn from 'classnames';
import {useStore} from 'App/mstore';
import { useStore } from 'App/mstore';
import SessionItem from 'Shared/SessionItem';
import {observer} from 'mobx-react-lite';
import {DateTime} from 'luxon';
import {debounce, numberWithCommas} from 'App/utils';
import { observer } from 'mobx-react-lite';
import { DateTime } from 'luxon';
import { debounce, numberWithCommas } from 'App/utils';
import useIsMounted from 'App/hooks/useIsMounted';
import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG';
import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card';
import {useTranslation} from 'react-i18next';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card';
import { useTranslation } from 'react-i18next';
interface Props {
className?: string;
className?: string;
}
function WidgetSessions(props: Props) {
const {t} = useTranslation();
const listRef = React.useRef<HTMLDivElement>(null);
const {className = ''} = props;
const [activeSeries, setActiveSeries] = useState('all');
const [data, setData] = useState<any>([]);
const isMounted = useIsMounted();
const [loading, setLoading] = useState(false);
// all filtering done through series now
const filteredSessions = getListSessionsBySeries(data, 'all');
const {dashboardStore, metricStore, sessionStore, customFieldStore} =
useStore();
const focusedSeries = metricStore.focusedSeriesName;
const filter = dashboardStore.drillDownFilter;
const widget = metricStore.instance;
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const [seriesOptions, setSeriesOptions] = useState([
{label: t('All'), value: 'all'},
]);
const hasFilters =
filter.filters.length > 0 ||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
const metaList = customFieldStore.list.map((i: any) => i.key);
const { t } = useTranslation();
const listRef = React.useRef<HTMLDivElement>(null);
const { className = '' } = props;
const [activeSeries, setActiveSeries] = useState('all');
const [data, setData] = useState<any>([]);
const isMounted = useIsMounted();
const [loading, setLoading] = useState(false);
// all filtering done through series now
const filteredSessions = getListSessionsBySeries(data, 'all');
const { dashboardStore, metricStore, sessionStore, customFieldStore } =
useStore();
const focusedSeries = metricStore.focusedSeriesName;
const filter = dashboardStore.drillDownFilter;
const widget = metricStore.instance;
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
'LLL dd, yyyy HH:mm',
);
const [seriesOptions, setSeriesOptions] = useState([
{ label: t('All'), value: 'all' },
]);
const hasFilters =
filter.filters.length > 0 ||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
const metaList = customFieldStore.list.map((i: any) => i.key);
const seriesDropdownItems = seriesOptions.map((option) => ({
key: option.value,
label: (
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
),
const seriesDropdownItems = seriesOptions.map((option) => ({
key: option.value,
label: (
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
),
}));
useEffect(() => {
if (!widget.series) return;
const seriesOptions = widget.series.map((item: any) => ({
label: item.name,
value: item.seriesId ?? item.name,
}));
setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]);
}, [widget.series.length]);
useEffect(() => {
if (!widget.series) return;
const seriesOptions = widget.series.map((item: any) => ({
label: item.name,
value: item.seriesId ?? item.name,
}));
setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]);
}, [widget.series.length]);
const fetchSessions = (metricId: any, filter: any) => {
if (!isMounted()) return;
setLoading(true);
delete filter.eventsOrderSupport;
if (widget.metricType === FUNNEL) {
if (filter.series[0].filter.filters.length === 0) {
setLoading(false);
return setData([]);
}
}
const fetchSessions = (metricId: any, filter: any) => {
if (!isMounted()) return;
if (widget.metricType === FUNNEL) {
if (filter.series[0].filter.filters.length === 0) {
setLoading(false);
return setData([]);
}
widget
.fetchSessions(metricId, filter)
.then((res: any) => {
setData(res);
if (metricStore.drillDown) {
setTimeout(() => {
message.info(t('Sessions Refreshed!'));
listRef.current?.scrollIntoView({ behavior: 'smooth' });
metricStore.setDrillDown(false);
}, 0);
}
})
.finally(() => {
setLoading(false);
});
};
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
sessionStore.getSessions(customFilters).then((data) => {
setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]);
});
};
const debounceRequest: any = React.useCallback(
debounce(fetchSessions, 1000),
[],
);
const debounceClickMapSearch = React.useCallback(
debounce(fetchClickmapSessions, 1000),
[],
);
const depsString = JSON.stringify(widget.series);
setLoading(true);
const filterCopy = {...filter};
delete filterCopy.eventsOrderSupport;
try {
// Handle filters properly with null checks
if (filterCopy.filters && filterCopy.filters.length > 0) {
// Ensure the nested path exists before pushing
if (filterCopy.series?.[0]?.filter) {
if (!filterCopy.series[0].filter.filters) {
filterCopy.series[0].filter.filters = [];
}
filterCopy.series[0].filter.filters.push(...filterCopy.filters);
}
filterCopy.filters = [];
}
} catch (e) {
// do nothing
const loadData = () => {
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
const clickFilter = {
value: [metricStore.clickMapSearch],
type: 'CLICK',
operator: 'onSelector',
isEvent: true,
// @ts-ignore
filters: [],
};
const timeRange = {
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
startDate: dashboardStore.drillDownPeriod.start,
endDate: dashboardStore.drillDownPeriod.end,
};
const customFilter = {
...filter,
...timeRange,
filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
}
widget
.fetchSessions(metricId, filterCopy)
.then((res: any) => {
setData(res);
if (metricStore.drillDown) {
setTimeout(() => {
message.info(t('Sessions Refreshed!'));
listRef.current?.scrollIntoView({behavior: 'smooth'});
metricStore.setDrillDown(false);
}, 0);
}
})
.finally(() => {
setLoading(false);
});
};
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
sessionStore.getSessions(customFilters).then((data) => {
setData([{...data, seriesId: 1, seriesName: 'Clicks'}]);
});
};
const debounceRequest: any = React.useCallback(
debounce(fetchSessions, 1000),
[],
);
const debounceClickMapSearch = React.useCallback(
debounce(fetchClickmapSessions, 1000),
[],
);
}
debounceRequest(widget.metricId, {
...filter,
series: seriesJson,
page: metricStore.sessionsPage,
limit: metricStore.sessionsPageSize,
});
}
};
useEffect(() => {
metricStore.updateKey('sessionsPage', 1);
loadData();
}, [
filter.startTimestamp,
filter.endTimestamp,
filter.filters,
depsString,
metricStore.clickMapSearch,
focusedSeries,
widget.startPoint,
widget.data.nodes,
metricStore.disabledSeries.length,
]);
useEffect(loadData, [metricStore.sessionsPage]);
useEffect(() => {
if (activeSeries === 'all') {
metricStore.setFocusedSeriesName(null);
} else {
metricStore.setFocusedSeriesName(
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
const depsString = JSON.stringify(widget.series);
const clearFilters = () => {
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
const loadData = () => {
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
const clickFilter = {
value: [metricStore.clickMapSearch],
type: 'CLICK',
operator: 'onSelector',
isEvent: true,
// @ts-ignore
filters: [],
};
const timeRange = {
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
startDate: dashboardStore.drillDownPeriod.start,
endDate: dashboardStore.drillDownPeriod.end,
};
const customFilter = {
...filter,
...timeRange,
filters: [...sessionStore.userFilter.filters, clickFilter],
};
debounceClickMapSearch(customFilter);
} else {
const hasStartPoint =
!!widget.startPoint && widget.metricType === USER_PATH;
const onlyFocused = focusedSeries
? widget.series.filter((s) => s.name === focusedSeries)
: widget.series;
const activeSeries = metricStore.disabledSeries.length
? onlyFocused.filter(
(s) => !metricStore.disabledSeries.includes(s.name),
)
: onlyFocused;
const seriesJson = activeSeries.map((s) => s.toJson());
if (hasStartPoint) {
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
}
if (widget.metricType === USER_PATH) {
if (
seriesJson[0].filter.filters[0].value[0] === '' &&
widget.data.nodes?.length
) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
}
}
debounceRequest(widget.metricId, {
...filter,
series: seriesJson,
page: metricStore.sessionsPage,
limit: metricStore.sessionsPageSize,
});
}
};
useEffect(() => {
metricStore.updateKey('sessionsPage', 1);
loadData();
}, [
filter.startTimestamp,
filter.endTimestamp,
filter.filters,
depsString,
metricStore.clickMapSearch,
focusedSeries,
widget.startPoint,
widget.data.nodes,
metricStore.disabledSeries.length,
]);
useEffect(loadData, [metricStore.sessionsPage]);
useEffect(() => {
if (activeSeries === 'all') {
metricStore.setFocusedSeriesName(null);
} else {
metricStore.setFocusedSeriesName(
seriesOptions.find((option) => option.value === activeSeries)?.label,
false,
);
}
}, [activeSeries]);
useEffect(() => {
if (focusedSeries) {
setActiveSeries(
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
'all',
);
} else {
setActiveSeries('all');
}
}, [focusedSeries]);
const clearFilters = () => {
metricStore.updateKey('sessionsPage', 1);
dashboardStore.resetDrillDownFilter();
};
return (
<div
className={cn(
className,
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
)}
>
<div className="flex items-center justify-between">
<div>
<div className="flex items-baseline gap-2">
<h2 className="text-xl">
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
</h2>
<div className="ml-2 color-gray-medium">
{metricStore.clickMapLabel
? `on "${metricStore.clickMapLabel}" `
: null}
{t('between')}{' '}
<span className="font-medium color-gray-darkest">
return (
<div
className={cn(
className,
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
)}
>
<div className="flex items-center justify-between">
<div>
<div className="flex items-baseline gap-2">
<h2 className="text-xl">
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
</h2>
<div className="ml-2 color-gray-medium">
{metricStore.clickMapLabel
? `on "${metricStore.clickMapLabel}" `
: null}
{t('between')}{' '}
<span className="font-medium color-gray-darkest">
{startTime}
</span>{' '}
{t('and')}{' '}
<span className="font-medium color-gray-darkest">
{t('and')}{' '}
<span className="font-medium color-gray-darkest">
{endTime}
</span>{' '}
</div>
{hasFilters && (
<Tooltip title={t('Clear Drilldown')} placement="top">
<Button type="text" size="small" onClick={clearFilters}>
<UndoOutlined/>
</Button>
</Tooltip>
)}
</div>
</div>
{hasFilters && (
<Tooltip title={t('Clear Drilldown')} placement="top">
<Button type="text" size="small" onClick={clearFilters}>
<UndoOutlined />
</Button>
</Tooltip>
)}
</div>
{hasFilters && widget.metricType === 'table' && (
<div className="py-2">
<Tag
closable
onClose={clearFilters}
className="truncate max-w-44 rounded-lg"
>
{filterText}
</Tag>
</div>
)}
</div>
{hasFilters && widget.metricType === 'table' && (
<div className="py-2">
<Tag
closable
onClose={clearFilters}
className="truncate max-w-44 rounded-lg"
>
{filterText}
</Tag>
</div>
)}
</div>
<div className="flex items-center gap-4">
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
<div className="flex items-center ml-6">
<div className="flex items-center gap-4">
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
<div className="flex items-center ml-6">
<span className="mr-2 color-gray-medium">
{t('Filter by Series')}
</span>
<Dropdown
menu={{
items: seriesDropdownItems,
selectable: true,
selectedKeys: [activeSeries],
}}
trigger={['click']}
>
<Button type="text" size="small">
{seriesOptions.find((option) => option.value === activeSeries)
?.label || t('Select Series')}
<DownOutlined/>
</Button>
</Dropdown>
</div>
)}
</div>
<Dropdown
menu={{
items: seriesDropdownItems,
selectable: true,
selectedKeys: [activeSeries],
}}
trigger={['click']}
>
<Button type="text" size="small">
{seriesOptions.find((option) => option.value === activeSeries)
?.label || t('Select Series')}
<DownOutlined />
</Button>
</Dropdown>
</div>
)}
</div>
</div>
<div className="mt-3">
<Loader loading={loading}>
<NoContent
title={
<div className="flex items-center justify-center flex-col">
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60}/>
<div className="mt-4"/>
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
</div>
</div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b"/>
</React.Fragment>
))}
<div className="mt-3">
<Loader loading={loading}>
<NoContent
title={
<div className="flex items-center justify-center flex-col">
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60} />
<div className="mt-4" />
<div className="text-center">
{t('No relevant sessions found for the selected time period')}
</div>
</div>
}
show={filteredSessions.sessions.length === 0}
>
{filteredSessions.sessions.map((session: any) => (
<React.Fragment key={session.sessionId}>
<SessionItem
disableUser
session={session}
metaList={metaList}
/>
<div className="border-b" />
</React.Fragment>
))}
<div
className="flex items-center justify-between p-5"
ref={listRef}
>
<div>
{t('Showing')}{' '}
<span className="font-medium">
<div
className="flex items-center justify-between p-5"
ref={listRef}
>
<div>
{t('Showing')}{' '}
<span className="font-medium">
{(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize +
1}
metricStore.sessionsPageSize +
1}
</span>{' '}
{t('to')}{' '}
<span className="font-medium">
{t('to')}{' '}
<span className="font-medium">
{(metricStore.sessionsPage - 1) *
metricStore.sessionsPageSize +
filteredSessions.sessions.length}
metricStore.sessionsPageSize +
filteredSessions.sessions.length}
</span>{' '}
{t('of')}{' '}
<span className="font-medium">
{t('of')}{' '}
<span className="font-medium">
{numberWithCommas(filteredSessions.total)}
</span>{' '}
{t('sessions.')}
</div>
<Pagination
page={metricStore.sessionsPage}
total={filteredSessions.total}
onPageChange={(page: any) =>
metricStore.updateKey('sessionsPage', page)
}
limit={metricStore.sessionsPageSize}
debounceRequest={500}
/>
</div>
</NoContent>
</Loader>
{t('sessions.')}
</div>
<Pagination
page={metricStore.sessionsPage}
total={filteredSessions.total}
onPageChange={(page: any) =>
metricStore.updateKey('sessionsPage', page)
}
limit={metricStore.sessionsPageSize}
debounceRequest={500}
/>
</div>
</div>
);
</NoContent>
</Loader>
</div>
</div>
);
}
const getListSessionsBySeries = (data: any, seriesId: any) => {
const arr = data.reduce(
(arr: any, element: any) => {
if (seriesId === 'all') {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
arr.sessions.push(...sessions);
} else if (element.seriesId === seriesId) {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
const duplicates = element.sessions.length - sessions.length;
arr.sessions.push(...sessions);
arr.total = element.total - duplicates;
}
return arr;
},
{sessions: []},
);
arr.total =
seriesId === 'all'
? Math.max(...data.map((i: any) => i.total))
: data.find((i: any) => i.seriesId === seriesId).total;
return arr;
const arr = data.reduce(
(arr: any, element: any) => {
if (seriesId === 'all') {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
arr.sessions.push(...sessions);
} else if (element.seriesId === seriesId) {
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
const sessions = element.sessions.filter(
(i: any) => !sessionIds.includes(i.sessionId),
);
const duplicates = element.sessions.length - sessions.length;
arr.sessions.push(...sessions);
arr.total = element.total - duplicates;
}
return arr;
},
{ sessions: [] },
);
arr.total =
seriesId === 'all'
? Math.max(...data.map((i: any) => i.total))
: data.find((i: any) => i.seriesId === seriesId).total;
return arr;
};
export default observer(WidgetSessions);

View file

@ -92,9 +92,6 @@ function WidgetView({
filter: { filters: selectedCard.filters },
}),
];
} else if (selectedCard.cardType === TABLE) {
cardData.series = [new FilterSeries()];
cardData.series[0].filter.eventsOrder = 'and';
}
if (selectedCard.cardType === FUNNEL) {
cardData.series = [new FilterSeries()];

View file

@ -83,7 +83,6 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
});
const onChartClick = () => {
dashboardStore.setDrillDownPeriod(dashboardStore.period);
// if (!isWidget || isPredefined) return;
props.history.push(
withSiteId(

View file

@ -1,80 +1,52 @@
import React, { useEffect, useState } from 'react';
import React, { useEffect } from 'react';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import ReCAPTCHA from 'react-google-recaptcha';
import { Form, Input, Loader, Icon, Message } from 'UI';
import { Button } from 'antd';
import { validatePassword } from 'App/validate';
import { PASSWORD_POLICY } from 'App/constants';
import stl from './forgotPassword.module.css';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
const recaptchaRef = React.createRef();
const ERROR_DONT_MATCH = (t) => t("Passwords don't match.");
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;
interface Props {
params: any;
}
function CreatePassword(props: Props & WithCaptchaProps) {
function CreatePassword(props: Props) {
const { t } = useTranslation();
const { params } = props;
const { userStore } = useStore();
const { loading } = userStore;
const { resetPassword } = userStore;
const [error, setError] = useState<string | null>(null);
const [validationError, setValidationError] = useState<string | null>(null);
const [updated, setUpdated] = useState(false);
const [passwordRepeat, setPasswordRepeat] = useState('');
const [password, setPassword] = useState('');
const [error, setError] = React.useState<string | null>(null);
const [validationError, setValidationError] = React.useState<string | null>(
null,
);
const [updated, setUpdated] = React.useState(false);
const [passwordRepeat, setPasswordRepeat] = React.useState('');
const [password, setPassword] = React.useState('');
const pass = params.get('pass');
const invitation = params.get('invitation');
const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;
const handleSubmit = (token?: string) => {
const handleSubmit = () => {
if (!validatePassword(password)) {
return;
}
resetPassword({
invitation,
pass,
password,
'g-recaptcha-response': token
})
.then(() => {
setUpdated(true);
})
.catch((err) => {
setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
});
void resetPassword({ invitation, pass, password });
};
const onSubmit = () => {
// Validate before attempting captcha verification
if (!validatePassword(password) || password !== passwordRepeat) {
setValidationError(
password !== passwordRepeat
? ERROR_DONT_MATCH(t)
: PASSWORD_POLICY(t)
);
return;
const onSubmit = (e: any) => {
e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
}
// Reset any previous errors
setError(null);
setValidationError(null);
submitWithCaptcha({ pass, invitation, password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error) => {
console.error('Captcha verification failed:', error);
// The component will handle showing appropriate messages
});
};
const write = (e: any) => {
@ -91,7 +63,7 @@ function CreatePassword(props: Props & WithCaptchaProps) {
} else {
setValidationError(null);
}
}, [passwordRepeat, password, t]);
}, [passwordRepeat, password]);
return (
<Form
@ -101,8 +73,19 @@ function CreatePassword(props: Props & WithCaptchaProps) {
>
{!error && (
<>
<Loader loading={loading || isVerifyingCaptcha}>
<Loader loading={loading}>
<div data-hidden={updated} className="w-full">
{CAPTCHA_ENABLED && (
<div className={stl.recaptcha}>
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}
<Form.Field>
<label>{t('New password')}</label>
<Input
@ -149,15 +132,10 @@ function CreatePassword(props: Props & WithCaptchaProps) {
<Button
htmlType="submit"
type="primary"
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha || validationError !== null}
loading={loading}
className="w-full mt-4"
>
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Create')}
{t('Create')}
</Button>
)}
</>
@ -175,4 +153,4 @@ function CreatePassword(props: Props & WithCaptchaProps) {
);
}
export default withCaptcha(observer(CreatePassword));
export default observer(CreatePassword);

View file

@ -1,26 +1,24 @@
import React, { useState } from 'react';
import React from 'react';
import { Loader, Icon } from 'UI';
import ReCAPTCHA from 'react-google-recaptcha';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
import { Form, Input, Button, Typography } from 'antd';
import { SquareArrowOutUpRight } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
interface Props {
}
function ResetPasswordRequest(props: Props & WithCaptchaProps) {
function ResetPasswordRequest() {
const { t } = useTranslation();
const { userStore } = useStore();
const { loading } = userStore;
const { requestResetPassword } = userStore;
const [requested, setRequested] = useState(false);
const [email, setEmail] = useState('');
const [error, setError] = useState(null);
const [smtpError, setSmtpError] = useState<boolean>(false);
const { submitWithCaptcha, isVerifyingCaptcha, resetCaptcha } = props;
const recaptchaRef = React.createRef();
const [requested, setRequested] = React.useState(false);
const [email, setEmail] = React.useState('');
const [error, setError] = React.useState(null);
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
const { CAPTCHA_SITE_KEY } = window.env;
const [smtpError, setSmtpError] = React.useState<boolean>(false);
const write = (e: any) => {
const { name, value } = e.target;
@ -28,21 +26,22 @@ function ResetPasswordRequest(props: Props & WithCaptchaProps) {
};
const onSubmit = () => {
// Validation check
if (!email || email.trim() === '') {
return;
// e.preventDefault();
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
}
submitWithCaptcha({ email: email.trim() })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha verification failed:', error);
});
};
const handleSubmit = (token?: string) => {
const handleSubmit = (token?: any) => {
if (
CAPTCHA_ENABLED &&
recaptchaRef.current &&
(token === null || token === undefined)
)
return;
setError(null);
requestResetPassword({ email: email.trim(), 'g-recaptcha-response': token })
.catch((err: any) => {
@ -51,21 +50,29 @@ function ResetPasswordRequest(props: Props & WithCaptchaProps) {
}
setError(err.message);
// Reset captcha for the next attempt
resetCaptcha();
})
.finally(() => {
setRequested(true);
});
};
return (
<Form
onFinish={onSubmit}
style={{ minWidth: '50%' }}
className="flex flex-col"
>
<Loader loading={loading || isVerifyingCaptcha}>
<Loader loading={false}>
{CAPTCHA_ENABLED && (
<div className="flex justify-center">
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
data-hidden={requested}
sitekey={CAPTCHA_SITE_KEY}
onChange={(token: any) => handleSubmit(token)}
/>
</div>
)}
{!requested && (
<>
<Form.Item>
@ -85,14 +92,10 @@ function ResetPasswordRequest(props: Props & WithCaptchaProps) {
<Button
type="primary"
htmlType="submit"
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
loading={loading}
disabled={loading}
>
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Processing...')
: t('Email Password Reset Link')}
{t('Email Password Reset Link')}
</Button>
</>
)}
@ -143,4 +146,4 @@ function ResetPasswordRequest(props: Props & WithCaptchaProps) {
);
}
export default withCaptcha(observer(ResetPasswordRequest));
export default observer(ResetPasswordRequest);

View file

@ -1,18 +1,23 @@
import withPageTitle from 'HOCs/withPageTitle';
import cn from 'classnames';
import React, { useEffect, useState } from 'react';
import React, { useEffect, useMemo, useRef, useState } from 'react';
// Consider using a different approach for titles in functional components
import ReCAPTCHA from 'react-google-recaptcha';
import { useHistory } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
import { toast } from 'react-toastify';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import { forgotPassword, signup } from 'App/routes';
import { Icon, Link, Loader } from 'UI';
import { Icon, Link, Loader, Tooltip } from 'UI';
import { Button, Form, Input } from 'antd';
import Copyright from 'Shared/Copyright';
import stl from './login.module.css';
import { useTranslation } from 'react-i18next';
import { useStore } from 'App/mstore';
import LanguageSwitcher from '../LanguageSwitcher';
import withCaptcha, { WithCaptchaProps } from 'App/withRecaptcha';
import SSOLogin from './SSOLogin';
const FORGOT_PASSWORD = forgotPassword();
const SIGNUP_ROUTE = signup();
@ -21,15 +26,14 @@ interface LoginProps {
location: Location;
}
function Login({
location,
submitWithCaptcha,
isVerifyingCaptcha,
resetCaptcha,
}: LoginProps & WithCaptchaProps) {
const CAPTCHA_ENABLED = window.env.CAPTCHA_ENABLED === 'true';
function Login({ location }: LoginProps) {
const { t } = useTranslation();
const [email, setEmail] = useState('');
const [password, setPassword] = useState('');
// const CAPTCHA_ENABLED = useMemo(() => window.env.CAPTCHA_ENABLED === 'true', []);
const recaptchaRef = useRef<ReCAPTCHA>(null);
const { loginStore, userStore } = useStore();
const { errors } = userStore.loginRequest;
const { loading } = loginStore;
@ -45,6 +49,7 @@ function Login({
}, [authDetails]);
useEffect(() => {
// void fetchTenants();
const jwt = params.get('jwt');
const spotJwt = params.get('spotJwt');
if (spotJwt) {
@ -103,36 +108,32 @@ function Login({
if (resp) {
userStore.syntheticLogin(resp);
setJwt({ jwt: resp.jwt, spotJwt: resp.spotJwt ?? null });
if (resp.spotJwt) {
handleSpotLogin(resp.spotJwt);
}
handleSpotLogin(resp.spotJwt);
}
})
.catch((e) => {
userStore.syntheticLoginError(e);
resetCaptcha();
});
};
const onSubmit = () => {
if (!email || !password) {
return;
if (CAPTCHA_ENABLED && recaptchaRef.current) {
recaptchaRef.current.execute();
} else if (!CAPTCHA_ENABLED) {
handleSubmit();
}
submitWithCaptcha({ email: email.trim(), password })
.then((data) => {
handleSubmit(data['g-recaptcha-response']);
})
.catch((error: any) => {
console.error('Captcha error:', error);
});
};
const ssoLink =
window !== window.top
? `${window.location.origin}/api/sso/saml2?iFrame=true`
: `${window.location.origin}/api/sso/saml2`;
return (
<div className="flex items-center justify-center h-screen">
<div className="flex flex-col items-center">
<div className="m-10 ">
<img src="/assets/logo.svg" width={200} alt="Company Logo" />
<img src="/assets/logo.svg" width={200} />
</div>
<div className="border rounded-lg bg-white shadow-sm">
<h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">
@ -144,7 +145,15 @@ function Login({
className={cn('flex items-center justify-center flex-col')}
style={{ width: '350px' }}
>
<Loader loading={loading || isVerifyingCaptcha}>
<Loader loading={loading}>
{CAPTCHA_ENABLED && (
<ReCAPTCHA
ref={recaptchaRef}
size="invisible"
sitekey={window.env.CAPTCHA_SITE_KEY}
onChange={(token) => handleSubmit(token)}
/>
)}
<div style={{ width: '350px' }} className="px-8">
<Form.Item>
<label>{t('Email Address')}</label>
@ -177,8 +186,8 @@ function Login({
</Loader>
{errors && errors.length ? (
<div className="px-8 my-2 w-full">
{errors.map((error, index) => (
<div key={index} className="flex items-center bg-red-lightest rounded p-3">
{errors.map((error) => (
<div className="flex items-center bg-red-lightest rounded p-3">
<Icon name="info" color="red" size="20" />
<span className="color-red ml-2">
{error}
@ -195,14 +204,8 @@ function Login({
className="mt-2 w-full text-center rounded-lg"
type="primary"
htmlType="submit"
loading={loading || isVerifyingCaptcha}
disabled={loading || isVerifyingCaptcha}
>
{isVerifyingCaptcha
? t('Verifying...')
: loading
? t('Logging in...')
: t('Login')}
{t('Login')}
</Button>
<div className="my-8 flex justify-center items-center flex-wrap">
@ -216,12 +219,63 @@ function Login({
</div>
</Form>
<SSOLogin authDetails={authDetails} />
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
{authDetails.sso ? (
<a href={ssoLink} rel="noopener noreferrer">
<Button type="text" htmlType="submit">
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</a>
) : (
<Tooltip
delay={0}
title={
<div className="text-center">
{authDetails.edition === 'ee' ? (
<span>
{t('SSO has not been configured.')}
<br />
{t('Please reach out to your admin.')}
</span>
) : (
ENTERPRISE_REQUEIRED(t)
)}
</div>
}
placement="top"
>
<Button
type="text"
htmlType="submit"
className="pointer-events-none opacity-30"
>
{`${t('Login with SSO')} ${
authDetails.ssoProvider
? `(${authDetails.ssoProvider})`
: ''
}`}
</Button>
</Tooltip>
)}
</div>
</div>
<div
className={cn('flex items-center w-96 justify-center my-8', {
'!hidden': !authDetails?.enforceSSO,
})}
>
<a href={ssoLink} rel="noopener noreferrer">
<Button type="primary">
{`${t('Login with SSO')} ${
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
}`}
</Button>
</a>
</div>
{authDetails?.enforceSSO && (
<SSOLogin authDetails={authDetails} enforceSSO={true} />
)}
</div>
</div>
@ -233,6 +287,4 @@ function Login({
);
}
export default withPageTitle('Login - OpenReplay')(
withCaptcha(observer(Login))
);
export default withPageTitle('Login - OpenReplay')(observer(Login));

View file

@ -1,78 +0,0 @@
import React from 'react';
import cn from 'classnames';
import { Button, Tooltip } from 'antd';
import { useTranslation } from 'react-i18next';
import { ENTERPRISE_REQUEIRED } from 'App/constants';
import stl from './login.module.css';
import { useStore } from 'App/mstore';
interface SSOLoginProps {
authDetails: any;
enforceSSO?: boolean;
}
const SSOLogin = ({ authDetails, enforceSSO = false }: SSOLoginProps) => {
const { userStore } = useStore();
const { t } = useTranslation();
const { isSSOSupported } = userStore;
const getSSOLink = () =>
window !== window.top
? `${window.location.origin}/api/sso/saml2?iFrame=true`
: `${window.location.origin}/api/sso/saml2`;
const ssoLink = getSSOLink();
const ssoButtonText = `${t('Login with SSO')} ${authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
}`;
if (enforceSSO) {
return (
<div className={cn('flex items-center w-96 justify-center my-8')}>
<a href={ssoLink} rel="noopener noreferrer">
<Button type="primary">{ssoButtonText}</Button>
</a>
</div>
);
}
return (
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
{authDetails.sso ? (
<a href={ssoLink} rel="noopener noreferrer">
<Button type="text" htmlType="submit">
{ssoButtonText}
</Button>
</a>
) : (
<Tooltip
title={
<div className="text-center">
{isSSOSupported ? (
<span>
{t('SSO has not been configured.')}
<br />
{t('Please reach out to your admin.')}
</span>
) : (
ENTERPRISE_REQUEIRED(t)
)}
</div>
}
placement="top"
>
<span className="cursor-not-allowed">
<Button
type="text"
htmlType="submit"
disabled={true}
>
{ssoButtonText}
</Button>
</span>
</Tooltip>
)}
</div>
);
};
export default SSOLogin;

View file

@ -1,14 +1,16 @@
import React from 'react';
import { Redirect, Route, RouteComponentProps, Switch } from 'react-router';
import { withRouter } from 'react-router-dom';
import { OB_TABS, onboarding as onboardingRoute, withSiteId } from 'App/routes';
import { Icon } from 'UI';
import IdentifyUsersTab from './components/IdentifyUsersTab';
import InstallOpenReplayTab from './components/InstallOpenReplayTab';
import IntegrationsTab from './components/IntegrationsTab';
import ManageUsersTab from './components/ManageUsersTab';
import SideMenu from './components/SideMenu';
import { useTranslation } from 'react-i18next';
import { Smartphone, AppWindow } from 'lucide-react';
interface Props {
match: {
@ -31,7 +33,7 @@ function Onboarding(props: Props) {
{
label: (
<div className="font-semibold flex gap-2 items-center">
<AppWindow size={16} />
<Icon name="browser/browser" size={16} />
&nbsp;{t('Web')}
</div>
),
@ -40,7 +42,7 @@ function Onboarding(props: Props) {
{
label: (
<div className="font-semibold flex gap-2 items-center">
<Smartphone size={16} />
<Icon name="mobile" size={16} />
&nbsp;{t('Mobile')}
</div>
),

View file

@ -130,20 +130,18 @@ function IdentifyUsersTab(props: Props) {
'To identify users through metadata, you will have to explicitly specify your user metadata so it can be injected during sessions. Follow the below steps',
)}
</p>
<div className="flex items-center gap-2 mb-2">
<div className="flex items-start">
<CircleNumber text="1" />
<MetadataList />
</div>
<div className="my-6" />
<div className="flex items-start">
<div>
<CircleNumber text="2" />
<CircleNumber text="2" />
<div className="pt-1 w-full">
<span className="font-bold">
{t('Inject metadata when recording sessions')}
</span>
</div>
<div className="pt-1 w-full">
<div className="my-2">
{t('Use the')}&nbsp;
<span className="highlight-blue">setMetadata</span>{' '}

View file

@ -8,7 +8,6 @@ import MobileOnboardingTabs from '../OnboardingTabs/OnboardingMobileTabs';
import ProjectFormButton from '../ProjectFormButton';
import withOnboarding, { WithOnboardingProps } from '../withOnboarding';
import { useTranslation } from 'react-i18next';
import { CircleHelp } from 'lucide-react'
interface Props extends WithOnboardingProps {
platforms: Array<{
@ -46,8 +45,8 @@ function InstallOpenReplayTab(props: Props) {
</div>
<a href={"https://docs.openreplay.com/en/sdk/using-or/"} target="_blank">
<Button size={"small"} type={"text"} className="ml-2 flex items-center gap-2">
<CircleHelp size={14} />
<div>{t('See Documentation')}</div>
<Icon name={"question-circle"} />
<div className={"text-main"}>{t('See Documentation')}</div>
</Button>
</a>
</h1>

View file

@ -55,14 +55,16 @@ function MetadataList() {
<Button type="default" onClick={() => openModal()}>
{t('Add Metadata')}
</Button>
{fields.map((f, index) => (
<TagBadge
key={index}
text={f.key}
onRemove={() => removeMetadata(f)}
outline
/>
))}
<div className="flex ml-2">
{fields.map((f, index) => (
<TagBadge
key={index}
text={f.key}
onRemove={() => removeMetadata(f)}
outline
/>
))}
</div>
</div>
);
}

View file

@ -1,32 +0,0 @@
import React from 'react'
import DocCard from "App/components/shared/DocCard";
import { useTranslation } from 'react-i18next';
import { Mail } from 'lucide-react'
import { CopyButton } from "UI";
export function CollabCard({ showUserModal }: { showUserModal: () => void }) {
const { t } = useTranslation();
return (
<DocCard title={t('Need help from team member?')}>
<div className={'text-main cursor-pointer flex items-center gap-2'} onClick={showUserModal}>
<Mail size={14} />
<span>
{t('Invite and Collaborate')}
</span>
</div>
</DocCard>
)
}
export function ProjectKeyCard({ projectKey }: { projectKey: string }) {
const { t } = useTranslation();
return (
<DocCard title={t('Project Key')}>
<div className="p-2 rounded bg-white flex justify-between items-center">
<div className={'font-mono'}>{projectKey}</div>
<CopyButton content={projectKey} className={'capitalize font-medium text-neutral-400'} />
</div>
</DocCard>
)
}

View file

@ -7,16 +7,17 @@ import stl from './installDocs.module.css';
import { useTranslation } from 'react-i18next';
const installationCommand = 'npm i @openreplay/tracker';
const usageCode = `import Tracker from '@openreplay/tracker';
const usageCode = `import { tracker } from '@openreplay/tracker';
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});
tracker.start()`;
const usageCodeSST = `import Tracker from '@openreplay/tracker/cjs';
const usageCodeSST = `import { tracker } from '@openreplay/tracker/cjs';
// alternatively you can use dynamic import without /cjs suffix to prevent issues with window scope
const tracker = new Tracker({
tracker.configure({
projectKey: "PROJECT_KEY",
ingestPoint: "https://${window.location.hostname}/ingest",
});

View file

@ -4,7 +4,6 @@ import DocCard from 'Shared/DocCard/DocCard';
import { useModal } from 'App/components/Modal';
import UserForm from 'App/components/Client/Users/components/UserForm/UserForm';
import AndroidInstallDocs from 'Components/Onboarding/components/OnboardingTabs/InstallDocs/AndroidInstallDocs';
import { CollabCard, ProjectKeyCard } from "./Callouts";
import MobileInstallDocs from './InstallDocs/MobileInstallDocs';
import { useTranslation } from 'react-i18next';
@ -40,9 +39,18 @@ function MobileTrackingCodeModal(props: Props) {
</div>
<div className="col-span-2">
<CollabCard showUserModal={showUserModal} />
<DocCard title={t('Need help from team member?')}>
<a className="link" onClick={showUserModal}>
{t('Invite and Collaborate')}
</a>
</DocCard>
<ProjectKeyCard projectKey={site.projectKey} />
<DocCard title={t('Project Key')}>
<div className="p-2 rounded bg-white flex justify-between items-center">
{site.projectKey}
<CopyButton content={site.projectKey} />
</div>
</DocCard>
</div>
</div>
);
@ -54,9 +62,18 @@ function MobileTrackingCodeModal(props: Props) {
</div>
<div className="col-span-2">
<CollabCard showUserModal={showUserModal} />
<DocCard title={t('Need help from team member?')}>
<a className="link" onClick={showUserModal}>
{t('Invite and Collaborate')}
</a>
</DocCard>
<ProjectKeyCard projectKey={site.projectKey} />
<DocCard title={t('Project Key')}>
<div className="p-2 rounded bg-white flex justify-between items-center">
{site.projectKey}
<CopyButton content={site.projectKey} />
</div>
</DocCard>
</div>
</div>
);

View file

@ -3,7 +3,6 @@ import { Tabs, Icon, CopyButton } from 'UI';
import DocCard from 'Shared/DocCard/DocCard';
import { useModal } from 'App/components/Modal';
import UserForm from 'App/components/Client/Users/components/UserForm/UserForm';
import { CollabCard, ProjectKeyCard } from "./Callouts";
import InstallDocs from './InstallDocs';
import ProjectCodeSnippet from './ProjectCodeSnippet';
import { useTranslation } from 'react-i18next';
@ -38,9 +37,20 @@ function TrackingCodeModal(props: Props) {
</div>
<div className="col-span-2">
<CollabCard showUserModal={showUserModal} />
<ProjectKeyCard projectKey={site.projectKey} />
<DocCard title="Need help from team member?">
<a className="link" onClick={showUserModal}>
{t('Invite and Collaborate')}
</a>
</DocCard>
<DocCard title="Project Key">
<div className="rounded bg-white px-2 py-1 flex items-center justify-between">
<span>{site.projectKey}</span>
<CopyButton
content={site.projectKey}
className="capitalize"
/>
</div>
</DocCard>
<DocCard title="Other ways to install">
<a
className="link flex items-center"
@ -67,9 +77,18 @@ function TrackingCodeModal(props: Props) {
</div>
<div className="col-span-2">
<CollabCard showUserModal={showUserModal} />
<DocCard title="Need help from team member?">
<a className="link" onClick={showUserModal}>
{t('Invite and Collaborate')}
</a>
</DocCard>
<ProjectKeyCard projectKey={site.projectKey} />
<DocCard title="Project Key">
<div className="p-2 rounded bg-white flex justify-between items-center">
{site.projectKey}
<CopyButton content={site.projectKey} />
</div>
</DocCard>
</div>
</div>
);

View file

@ -41,7 +41,7 @@ function SideMenu(props: Props) {
<Menu
mode="inline"
onClick={handleClick}
style={{ border: 'none' }}
style={{ marginTop: '8px', border: 'none' }}
selectedKeys={activeTab ? [activeTab] : []}
>
<Menu.Item

View file

@ -8,7 +8,7 @@ import {
LikeFilled,
LikeOutlined,
} from '@ant-design/icons';
import { Tour, TourProps } from 'antd';
import { Tour, TourProps } from './.store/antd-virtual-7db13b4af6/package';
import { useTranslation } from 'react-i18next';
interface Props {

View file

@ -91,7 +91,7 @@ function PlayerBlockHeader(props: Props) {
)}
</div>
</div>
<div className="relative border-l" style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}>
<div className="relative border-l" style={{ minWidth: '270px' }}>
<Tabs
tabs={TABS}
active={activeTab}

View file

@ -61,7 +61,7 @@ function PlayerContent({
className="w-full"
style={
activeTab && !fullscreen
? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
? { maxWidth: 'calc(100% - 270px)' }
: undefined
}
>

View file

@ -42,7 +42,7 @@ function DropdownAudioPlayer({
return {
url: data.url,
timestamp: data.timestamp,
start: Math.max(0, startTs),
start: startTs,
};
}),
[audioEvents.length, sessionStart],

View file

@ -114,17 +114,19 @@ function PlayerBlockHeader(props: any) {
)}
{_metaList.length > 0 && (
<SessionMetaList
horizontal
metaList={_metaList}
maxLength={2}
/>
<div className="h-full flex items-center px-2 gap-1">
<SessionMetaList
className=""
metaList={_metaList}
maxLength={2}
/>
</div>
)}
</div>
</div>
<div
className="px-2 relative border-l border-l-gray-lighter"
style={{ minWidth: activeTab === 'EXPORT' ? '360px' : '270px' }}
style={{ minWidth: '270px' }}
>
<Tabs
tabs={TABS}

View file

@ -65,7 +65,7 @@ function PlayerContent({
className="w-full"
style={
activeTab && !fullscreen
? { maxWidth: `calc(100% - ${activeTab === 'EXPORT' ? '360px' : '270px'})` }
? { maxWidth: 'calc(100% - 270px)' }
: undefined
}
>

View file

@ -182,7 +182,6 @@ function Player(props: IProps) {
setActiveTab={(tab: string) =>
activeTab === tab ? props.setActiveTab('') : props.setActiveTab(tab)
}
activeTab={activeTab}
speedDown={playerContext.player.speedDown}
speedUp={playerContext.player.speedUp}
jump={playerContext.player.jump}

View file

@ -7,16 +7,13 @@ import { Icon } from 'UI';
function LogsButton({
integrated,
onClick,
shorten,
}: {
integrated: string[];
onClick: () => void;
shorten?: boolean;
}) {
return (
<ControlButton
label={shorten ? null : "Traces"}
customKey="traces"
label="Traces"
customTags={
<Avatar.Group>
{integrated.map((name) => (

View file

@ -38,8 +38,8 @@ function WebPlayer(props: any) {
uxtestingStore,
uiPlayerStore,
integrationsStore,
userStore,
} = useStore();
const devTools = sessionStore.devTools
const session = sessionStore.current;
const { prefetched } = sessionStore;
const startedAt = sessionStore.current.startedAt || 0;
@ -57,17 +57,14 @@ function WebPlayer(props: any) {
const [fullView, setFullView] = useState(false);
React.useEffect(() => {
const handleActivation = () => {
if (!document.hidden) {
setWindowActive(true);
document.removeEventListener('visibilitychange', handleActivation);
}
};
document.addEventListener('visibilitychange', handleActivation);
return () => {
devTools.update('network', { activeTab: 'ALL' });
document.removeEventListener('visibilitychange', handleActivation);
if (windowActive) {
const handleActivation = () => {
if (!document.hidden) {
setWindowActive(true);
document.removeEventListener('visibilitychange', handleActivation);
}
};
document.addEventListener('visibilitychange', handleActivation);
}
}, []);

View file

@ -169,6 +169,6 @@ function TabChange({ from, to, activeUrl, onClick }) {
</div>
</div>
);
};
}
export default observer(EventGroupWrapper);

View file

@ -4,17 +4,17 @@ import cn from 'classnames';
import { observer } from 'mobx-react-lite';
import React from 'react';
import { VList, VListHandle } from 'virtua';
import { Button } from 'antd';
import { Button } from 'antd'
import { PlayerContext } from 'App/components/Session/playerContext';
import { useStore } from 'App/mstore';
import { Icon } from 'UI';
import { Search } from 'lucide-react';
import { Search } from 'lucide-react'
import EventGroupWrapper from './EventGroupWrapper';
import EventSearch from './EventSearch/EventSearch';
import styles from './eventsBlock.module.css';
import { useTranslation } from 'react-i18next';
import { CloseOutlined } from "@ant-design/icons";
import { Tooltip } from "antd";
import { CloseOutlined } from ".store/@ant-design-icons-virtual-42686020c5/package";
import { Tooltip } from ".store/antd-virtual-9dbfadb7f6/package";
import { getDefaultFramework, frameworkIcons } from "../UnitStepsModal";
interface IProps {
@ -25,7 +25,7 @@ const MODES = {
SELECT: 'select',
SEARCH: 'search',
EXPORT: 'export',
};
}
function EventsBlock(props: IProps) {
const defaultFramework = getDefaultFramework();
@ -95,7 +95,7 @@ function EventsBlock(props: IProps) {
? e.time >= zoomStartTs && e.time <= zoomEndTs
: false
: true,
);
);
}, [
filteredLength,
notesWithEvtsLength,
@ -126,7 +126,6 @@ function EventsBlock(props: IProps) {
},
[usedEvents, time, endTime],
);
const currentTimeEventIndex = findLastFitting(time);
const write = ({
@ -183,7 +182,6 @@ function EventsBlock(props: IProps) {
const isTabChange = 'type' in event && event.type === 'TABCHANGE';
const isCurrent = index === currentTimeEventIndex;
const isPrev = index < currentTimeEventIndex;
return (
<EventGroupWrapper
query={query}
@ -251,14 +249,12 @@ function EventsBlock(props: IProps) {
onClick={() => setMode(MODES.SEARCH)}
>
<Search size={14} />
<div>
{t('Search')}&nbsp;{usedEvents.length}&nbsp;{t('events')}
</div>
<div>{t('Search')}&nbsp;{usedEvents.length}&nbsp;{t('events')}</div>
</Button>
<Tooltip title={t('Close Panel')} placement="bottom">
<Tooltip title={t('Close Panel')} placement='bottom' >
<Button
className="ml-auto"
type="text"
type='text'
onClick={() => {
setActiveTab('');
}}
@ -267,23 +263,19 @@ function EventsBlock(props: IProps) {
</Tooltip>
</div>
) : null}
{mode === MODES.SEARCH ? (
{mode === MODES.SEARCH ?
<div className={'flex items-center gap-2'}>
<EventSearch
onChange={write}
setActiveTab={setActiveTab}
value={query}
eventsText={
usedEvents.length
? `${usedEvents.length} ${t('Events')}`
: `0 ${t('Events')}`
usedEvents.length ? `${usedEvents.length} ${t('Events')}` : `0 ${t('Events')}`
}
/>
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>
{t('Cancel')}
</Button>
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>{t('Cancel')}</Button>
</div>
) : null}
: null}
</div>
<div
className={cn('flex-1 pb-4', styles.eventsList)}

View file

@ -65,6 +65,7 @@ function GraphQL({ panelHeight }: { panelHeight: number }) {
const filterList = (list: any, value: string) => {
const filterRE = getRE(value, 'i');
const { t } = useTranslation();
return value
? list.filter(

View file

@ -4,7 +4,7 @@ import { Popover, Button } from 'antd';
import stl from './controlButton.module.css';
interface IProps {
label: React.ReactNode;
label: string;
icon?: string;
disabled?: boolean;
onClick?: () => void;
@ -18,7 +18,6 @@ interface IProps {
noIcon?: boolean;
popover?: React.ReactNode;
customTags?: React.ReactNode;
customKey?: string;
}
function ControlButton({
@ -29,28 +28,29 @@ function ControlButton({
active = false,
popover = undefined,
customTags,
customKey,
}: IProps) {
return (
<Popover content={popover} open={popover ? undefined : false}>
<Button
size="small"
onClick={onClick}
id={`control-button-${customKey ? customKey.toLowerCase() : label!.toString().toLowerCase()}`}
id={`control-button-${label.toLowerCase()}`}
disabled={disabled}
>
{customTags}
{hasErrors && (
<div className="w-2 h-2 rounded-full bg-red" />
<div className={stl.labels}>
<div className={stl.errorSymbol} />
</div>
)}
{label && <span
<span
className={cn(
'font-semibold hover:text-main',
active ? 'color-main' : 'color-gray-darkest',
)}
>
{label}
</span>}
</span>
</Button>
</Popover>
);

View file

@ -32,8 +32,6 @@ import {
} from 'App/mstore/uiPlayerStore';
import { Icon } from 'UI';
import LogsButton from 'App/components/Session/Player/SharedComponents/BackendLogs/LogsButton';
import { CodeOutlined, DashboardOutlined, ClusterOutlined } from '@ant-design/icons';
import { ArrowDownUp, ListCollapse, Merge, Waypoints } from 'lucide-react'
import ControlButton from './ControlButton';
import Timeline from './Timeline';
@ -54,23 +52,23 @@ export const SKIP_INTERVALS = {
function getStorageName(type: any) {
switch (type) {
case STORAGE_TYPES.REDUX:
return { name: 'Redux', icon: <Icon name='integrations/redux' size={14} /> };
return 'Redux';
case STORAGE_TYPES.MOBX:
return { name: 'Mobx', icon: <Icon name='integrations/mobx' size={14} /> };
return 'Mobx';
case STORAGE_TYPES.VUEX:
return { name: 'Vuex', icon: <Icon name='integrations/vuejs' size={14} /> };
return 'Vuex';
case STORAGE_TYPES.NGRX:
return { name: 'NgRx', icon: <Icon name='integrations/ngrx' size={14} /> };
return 'NgRx';
case STORAGE_TYPES.ZUSTAND:
return { name: 'Zustand', icon: <Icon name='integrations/zustand' size={14} /> };
return 'Zustand';
case STORAGE_TYPES.NONE:
return { name: 'State', icon: <ClusterOutlined size={14} /> };
return 'State';
default:
return { name: 'State', icon: <ClusterOutlined size={14} /> };
return 'State';
}
}
function Controls({ setActiveTab, activeTab }: any) {
function Controls({ setActiveTab }: any) {
const { player, store } = React.useContext(PlayerContext);
const {
uxtestingStore,
@ -193,7 +191,6 @@ function Controls({ setActiveTab, activeTab }: any) {
bottomBlock={bottomBlock}
disabled={disabled}
events={events}
activeTab={activeTab}
/>
)}
@ -215,7 +212,6 @@ interface IDevtoolsButtons {
bottomBlock: number;
disabled: boolean;
events: any[];
activeTab?: string;
}
const DevtoolsButtons = observer(
@ -225,7 +221,6 @@ const DevtoolsButtons = observer(
bottomBlock,
disabled,
events,
activeTab,
}: IDevtoolsButtons) => {
const { t } = useTranslation();
const { aiSummaryStore, integrationsStore } = useStore();
@ -267,36 +262,6 @@ const DevtoolsButtons = observer(
const possibleAudio = events.filter((e) => e.name.includes('media/audio'));
const integratedServices =
integrationsStore.integrations.backendLogIntegrations;
const showIcons = activeTab === 'EXPORT'
const labels = {
console: {
icon: <CodeOutlined size={14} />,
label: t('Console'),
},
performance: {
icon: <DashboardOutlined size={14} />,
label: t('Performance'),
},
network: {
icon: <ArrowDownUp size={14} strokeWidth={2} />,
label: t('Network'),
},
events: {
icon: <ListCollapse size={14} strokeWidth={2} />,
label: t('Events'),
},
state: {
icon: getStorageName(storageType).icon,
label: getStorageName(storageType).name,
},
graphql: {
icon: <Merge size={14} strokeWidth={2} />,
label: 'Graphql',
}
}
// @ts-ignore
const getLabel = (block: string) => labels[block][showIcons ? 'icon' : 'label']
return (
<>
{isSaas ? <SummaryButton onClick={showSummary} /> : null}
@ -309,7 +274,6 @@ const DevtoolsButtons = observer(
</div>
</div>
}
customKey="xray"
label="X-Ray"
onClick={() => toggleBottomTools(OVERVIEW)}
active={bottomBlock === OVERVIEW && !inspectorMode}
@ -322,11 +286,10 @@ const DevtoolsButtons = observer(
<div>{t('Launch Console')}</div>
</div>
}
customKey="console"
disabled={disableButtons}
onClick={() => toggleBottomTools(CONSOLE)}
active={bottomBlock === CONSOLE && !inspectorMode}
label={getLabel('console')}
label={t('Console')}
hasErrors={logRedCount > 0 || showExceptions}
/>
@ -337,11 +300,10 @@ const DevtoolsButtons = observer(
<div>{t('Launch Network')}</div>
</div>
}
customKey="network"
disabled={disableButtons}
onClick={() => toggleBottomTools(NETWORK)}
active={bottomBlock === NETWORK && !inspectorMode}
label={getLabel('network')}
label={t('Network')}
hasErrors={resourceRedCount > 0}
/>
@ -352,11 +314,10 @@ const DevtoolsButtons = observer(
<div>{t('Launch Performance')}</div>
</div>
}
customKey="performance"
disabled={disableButtons}
onClick={() => toggleBottomTools(PERFORMANCE)}
active={bottomBlock === PERFORMANCE && !inspectorMode}
label={getLabel('performance')}
label="Performance"
/>
{showGraphql && (
@ -364,8 +325,7 @@ const DevtoolsButtons = observer(
disabled={disableButtons}
onClick={() => toggleBottomTools(GRAPHQL)}
active={bottomBlock === GRAPHQL && !inspectorMode}
label={getLabel('graphql')}
customKey="graphql"
label="Graphql"
/>
)}
@ -377,11 +337,10 @@ const DevtoolsButtons = observer(
<div>{t('Launch State')}</div>
</div>
}
customKey="state"
disabled={disableButtons}
onClick={() => toggleBottomTools(STORAGE)}
active={bottomBlock === STORAGE && !inspectorMode}
label={getLabel('state')}
label={getStorageName(storageType) as string}
/>
)}
<ControlButton
@ -391,16 +350,14 @@ const DevtoolsButtons = observer(
<div>{t('Launch Events')}</div>
</div>
}
customKey="events"
disabled={disableButtons}
onClick={() => toggleBottomTools(STACKEVENTS)}
active={bottomBlock === STACKEVENTS && !inspectorMode}
label={getLabel('events')}
label={t('Events')}
hasErrors={stackRedCount > 0}
/>
{showProfiler && (
<ControlButton
customKey="profiler"
disabled={disableButtons}
onClick={() => toggleBottomTools(PROFILER)}
active={bottomBlock === PROFILER && !inspectorMode}
@ -411,7 +368,6 @@ const DevtoolsButtons = observer(
<LogsButton
integrated={integratedServices.map((service) => service.name)}
onClick={() => toggleBottomTools(BACKENDLOGS)}
shorten={showIcons}
/>
) : null}
{possibleAudio.length ? (

View file

@ -6,11 +6,9 @@ import {
import { observer } from 'mobx-react-lite';
import stl from './timeline.module.css';
import { getTimelinePosition } from './getTimelinePosition';
import { useStore } from '@/mstore';
function EventsList() {
const { store } = useContext(PlayerContext);
const { uiPlayerStore } = useStore();
const { eventCount, endTime } = store.get();
const { tabStates } = store.get();
@ -19,6 +17,7 @@ function EventsList() {
() => Object.values(tabStates)[0]?.eventList.filter((e) => e.time) || [],
[eventCount],
);
React.useEffect(() => {
const hasDuplicates = events.some(
(e, i) =>

View file

@ -49,6 +49,7 @@
z-index: 2;
}
.event {
position: absolute;
width: 2px;

View file

@ -38,7 +38,6 @@ function SubHeader(props) {
projectsStore,
userStore,
issueReportingStore,
settingsStore
} = useStore();
const { t } = useTranslation();
const { favorite } = sessionStore.current;
@ -46,7 +45,7 @@ function SubHeader(props) {
const currentSession = sessionStore.current;
const projectId = projectsStore.siteId;
const integrations = integrationsStore.issues.list;
const { player, store } = React.useContext(PlayerContext);
const { store } = React.useContext(PlayerContext);
const { location: currentLocation = 'loading...' } = store.get();
const hasIframe = localStorage.getItem(IFRAME) === 'true';
const [hideTools, setHideTools] = React.useState(false);
@ -128,13 +127,6 @@ function SubHeader(props) {
});
};
const showVModeBadge = store.get().vModeBadge;
const onVMode = () => {
settingsStore.sessionSettings.updateKey('virtualMode', true);
player.enableVMode?.();
location.reload();
}
return (
<>
<div
@ -151,8 +143,6 @@ function SubHeader(props) {
siteId={projectId!}
currentLocation={currentLocation}
version={currentSession?.trackerVersion ?? ''}
virtualElsFailed={showVModeBadge}
onVMode={onVMode}
/>
<SessionTabs />

View file

@ -202,7 +202,7 @@ function UnitStepsModal({ onClose }: Props) {
<div className={'w-full'}>
<CodeBlock
width={340}
height={'calc(100vh - 174px)'}
height={'calc(100vh - 146px)'}
extra={`${events.length} Events`}
copy
code={eventStr}

View file

@ -34,46 +34,38 @@ const WarnBadge = React.memo(
currentLocation,
version,
siteId,
virtualElsFailed,
onVMode,
}: {
currentLocation: string;
version: string;
siteId: string;
virtualElsFailed: boolean;
onVMode: () => void;
}) => {
const { t } = useTranslation();
const localhostWarnSiteKey = localhostWarn(siteId);
const defaultLocalhostWarn =
localStorage.getItem(localhostWarnSiteKey) !== '1';
const localhostWarnActive = Boolean(
const localhostWarnActive =
currentLocation &&
defaultLocalhostWarn &&
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation)
)
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation);
const trackerVersion = window.env.TRACKER_VERSION ?? undefined;
const trackerVerDiff = compareVersions(version, trackerVersion);
const trackerWarnActive = trackerVerDiff !== VersionComparison.Same;
const [warnings, setWarnings] = React.useState<[localhostWarn: boolean, trackerWarn: boolean, virtualElsFailWarn: boolean]>([localhostWarnActive, trackerWarnActive, virtualElsFailed])
const [showLocalhostWarn, setLocalhostWarn] =
React.useState(localhostWarnActive);
const [showTrackerWarn, setTrackerWarn] = React.useState(trackerWarnActive);
React.useEffect(() => {
setWarnings([localhostWarnActive, trackerWarnActive, virtualElsFailed])
}, [localhostWarnActive, trackerWarnActive, virtualElsFailed])
const closeWarning = (type: 0 | 1 | 2) => {
const closeWarning = (type: 1 | 2) => {
if (type === 1) {
localStorage.setItem(localhostWarnSiteKey, '1');
setLocalhostWarn(false);
}
if (type === 2) {
setTrackerWarn(false);
}
setWarnings((prev) => {
const newWarnings = [...prev];
newWarnings[type] = false;
return newWarnings;
});
};
if (!warnings.some(el => el === true)) return null;
if (!showLocalhostWarn && !showTrackerWarn) return null;
return (
<div
@ -87,7 +79,7 @@ const WarnBadge = React.memo(
fontWeight: 500,
}}
>
{warnings[0] ? (
{showLocalhostWarn ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div>
<span>{t('Some assets may load incorrectly on localhost.')}</span>
@ -109,7 +101,7 @@ const WarnBadge = React.memo(
</div>
</div>
) : null}
{warnings[1] ? (
{showTrackerWarn ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div>
<div>
@ -133,21 +125,6 @@ const WarnBadge = React.memo(
</div>
</div>
<div
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(1)}
>
<Icon name="close" size={16} color="black" />
</div>
</div>
) : null}
{warnings[2] ? (
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
<div className="flex flex-col">
<div>{t('If you have issues displaying custom HTML elements (i.e when using LWC), consider turning on Virtual Mode.')}</div>
<div className='link' onClick={onVMode}>{t('Enable')}</div>
</div>
<div
className="py-1 ml-3 cursor-pointer"
onClick={() => closeWarning(2)}

View file

@ -12,123 +12,60 @@ import {
getDateRangeFromValue,
getDateRangeLabel,
} from 'App/dateRange';
import { DateTime, Interval, Settings } from 'luxon';
import { DateTime, Interval } from 'luxon';
import styles from './dateRangePopup.module.css';
import { useTranslation } from 'react-i18next';
function DateRangePopup(props: any) {
const { t } = useTranslation();
const [displayDates, setDisplayDates] = React.useState<[Date, Date]>([new Date(), new Date()]);
const [range, setRange] = React.useState(
props.selectedDateRange ||
Interval.fromDateTimes(DateTime.now(), DateTime.now()),
);
const [value, setValue] = React.useState<string | null>(null);
React.useEffect(() => {
if (props.selectedDateRange) {
const start = new Date(
props.selectedDateRange.start.year,
props.selectedDateRange.start.month - 1, // JS months are 0-based
props.selectedDateRange.start.day
);
const end = new Date(
props.selectedDateRange.end.year,
props.selectedDateRange.end.month - 1,
props.selectedDateRange.end.day
);
setDisplayDates([start, end]);
}
}, [props.selectedDateRange]);
const createNaiveTime = (dateTime: DateTime) => {
if (!dateTime) return null;
return DateTime.fromObject({
hour: dateTime.hour,
minute: dateTime.minute
});
};
const selectCustomRange = (newDates: [Date, Date]) => {
if (!newDates || !newDates[0] || !newDates[1]) return;
setDisplayDates(newDates);
const selectedTzStart = DateTime.fromObject({
year: newDates[0].getFullYear(),
month: newDates[0].getMonth() + 1,
day: newDates[0].getDate(),
hour: 0,
minute: 0
}).setZone(Settings.defaultZone);
const selectedTzEnd = DateTime.fromObject({
year: newDates[1].getFullYear(),
month: newDates[1].getMonth() + 1,
day: newDates[1].getDate(),
hour: 23,
minute: 59
}).setZone(Settings.defaultZone);
const updatedRange = Interval.fromDateTimes(selectedTzStart, selectedTzEnd);
setRange(updatedRange);
const selectCustomRange = (range) => {
let newRange;
if (props.singleDay) {
newRange = Interval.fromDateTimes(
DateTime.fromJSDate(range),
DateTime.fromJSDate(range),
);
} else {
newRange = Interval.fromDateTimes(
DateTime.fromJSDate(range[0]),
DateTime.fromJSDate(range[1]),
);
}
setRange(newRange);
setValue(CUSTOM_RANGE);
};
const setRangeTimeStart = (naiveTime: DateTime) => {
if (!range.end || !naiveTime) return;
const newStart = range.start.set({
hour: naiveTime.hour,
minute: naiveTime.minute
const setRangeTimeStart = (value: DateTime) => {
if (!range.end || value > range.end) {
return;
}
const newRange = range.start.set({
hour: value.hour,
minute: value.minute,
});
if (newStart > range.end) return;
setRange(Interval.fromDateTimes(newStart, range.end));
setRange(Interval.fromDateTimes(newRange, range.end));
setValue(CUSTOM_RANGE);
};
const setRangeTimeEnd = (naiveTime: DateTime) => {
if (!range.start || !naiveTime) return;
const newEnd = range.end.set({
hour: naiveTime.hour,
minute: naiveTime.minute
});
if (newEnd < range.start) return;
setRange(Interval.fromDateTimes(range.start, newEnd));
const setRangeTimeEnd = (value: DateTime) => {
if (!range.start || (value && value < range.start)) {
return;
}
const newRange = range.end.set({ hour: value.hour, minute: value.minute });
setRange(Interval.fromDateTimes(range.start, newRange));
setValue(CUSTOM_RANGE);
};
const selectValue = (value: string) => {
const newRange = getDateRangeFromValue(value);
if (!newRange.start || !newRange.end) {
setRange(Interval.fromDateTimes(DateTime.now(), DateTime.now()));
setDisplayDates([new Date(), new Date()]);
setValue(null);
return;
}
const zonedStart = newRange.start.setZone(Settings.defaultZone);
const zonedEnd = newRange.end.setZone(Settings.defaultZone);
setRange(Interval.fromDateTimes(zonedStart, zonedEnd));
const start = new Date(
zonedStart.year,
zonedStart.month - 1,
zonedStart.day
);
const end = new Date(
zonedEnd.year,
zonedEnd.month - 1,
zonedEnd.day
);
setDisplayDates([start, end]);
const range = getDateRangeFromValue(value);
setRange(range);
setValue(value);
};
@ -140,9 +77,9 @@ function DateRangePopup(props: any) {
const isUSLocale =
navigator.language === 'en-US' || navigator.language.startsWith('en-US');
const naiveStartTime = createNaiveTime(range.start);
const naiveEndTime = createNaiveTime(range.end);
const rangeForDisplay = props.singleDay
? range.start.ts
: [range.start!.startOf('day').ts, range.end!.startOf('day').ts];
return (
<div className={styles.wrapper}>
<div className={`${styles.body} h-fit`}>
@ -166,7 +103,7 @@ function DateRangePopup(props: any) {
shouldCloseCalendar={() => false}
isOpen
maxDate={new Date()}
value={displayDates}
value={rangeForDisplay}
calendarProps={{
tileDisabled: props.isTileDisabled,
selectRange: !props.singleDay,
@ -185,7 +122,7 @@ function DateRangePopup(props: any) {
<span>{range.start.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
<TimePicker
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
value={naiveStartTime}
value={range.start}
onChange={setRangeTimeStart}
needConfirm={false}
showNow={false}
@ -195,7 +132,7 @@ function DateRangePopup(props: any) {
<span>{range.end.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
<TimePicker
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
value={naiveEndTime}
value={range.end}
onChange={setRangeTimeEnd}
needConfirm={false}
showNow={false}

View file

@ -1,17 +1,9 @@
/* eslint-disable i18next/no-literal-string */
import { ResourceType, Timed } from 'Player';
import { WsChannel } from 'Player/web/messages';
import MobilePlayer from 'Player/mobile/IOSPlayer';
import WebPlayer from 'Player/web/WebPlayer';
import { observer } from 'mobx-react-lite';
import React, {
useMemo,
useState,
useEffect,
useCallback,
useRef,
} from 'react';
import i18n from 'App/i18n'
import React, { useMemo, useState } from 'react';
import { useModal } from 'App/components/Modal';
import {
@ -20,27 +12,25 @@ import {
} from 'App/components/Session/playerContext';
import { formatMs } from 'App/date';
import { useStore } from 'App/mstore';
import { formatBytes, debounceCall } from 'App/utils';
import { formatBytes } from 'App/utils';
import { Icon, NoContent, Tabs } from 'UI';
import { Tooltip, Input, Switch, Form } from 'antd';
import {
SearchOutlined,
InfoCircleOutlined,
} from '@ant-design/icons';
import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons';
import FetchDetailsModal from 'Shared/FetchDetailsModal';
import { WsChannel } from 'App/player/web/messages';
import BottomBlock from '../BottomBlock';
import InfoLine from '../BottomBlock/InfoLine';
import TabSelector from '../TabSelector';
import TimeTable from '../TimeTable';
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
import WSPanel from './WSPanel';
import { useTranslation } from 'react-i18next';
import { mergeListsWithZoom, processInChunks } from './utils'
// Constants remain the same
const INDEX_KEY = 'network';
const ALL = 'ALL';
const XHR = 'xhr';
const JS = 'js';
@ -72,9 +62,6 @@ export const NETWORK_TABS = TAP_KEYS.map((tab) => ({
const DOM_LOADED_TIME_COLOR = 'teal';
const LOAD_TIME_COLOR = 'red';
const BATCH_SIZE = 2500;
const INITIAL_LOAD_SIZE = 5000;
export function renderType(r: any) {
return (
<Tooltip style={{ width: '100%' }} title={<div>{r.type}</div>}>
@ -92,17 +79,13 @@ export function renderName(r: any) {
}
function renderSize(r: any) {
const t = i18n.t;
const notCaptured = t('Not captured');
const resSizeStr = t('Resource size')
const { t } = useTranslation();
if (r.responseBodySize) return formatBytes(r.responseBodySize);
let triggerText;
let content;
if (r.responseBodySize) {
triggerText = formatBytes(r.responseBodySize);
content = undefined;
} else if (r.decodedBodySize == null || r.decodedBodySize === 0) {
if (r.decodedBodySize == null || r.decodedBodySize === 0) {
triggerText = 'x';
content = notCaptured;
content = t('Not captured');
} else {
const headerSize = r.headerSize || 0;
const showTransferred = r.headerSize != null;
@ -117,7 +100,7 @@ function renderSize(r: any) {
)} transferred over network`}
</li>
)}
<li>{`${resSizeStr}: ${formatBytes(r.decodedBodySize)} `}</li>
<li>{`${t('Resource size')}: ${formatBytes(r.decodedBodySize)} `}</li>
</ul>
);
}
@ -185,8 +168,6 @@ function renderStatus({
);
}
// Main component for Network Panel
function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
const { player, store } = React.useContext(PlayerContext);
const { sessionStore, uiPlayerStore } = useStore();
@ -235,7 +216,6 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
const getTabNum = (tab: string) => tabsArr.findIndex((t) => t === tab) + 1;
const getTabName = (tabId: string) => tabNames[tabId];
return (
<NetworkPanelComp
loadTime={loadTime}
@ -248,8 +228,8 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
resourceListNow={resourceListNow}
player={player}
startedAt={startedAt}
websocketList={websocketList}
websocketListNow={websocketListNow}
websocketList={websocketList as WSMessage[]}
websocketListNow={websocketListNow as WSMessage[]}
getTabNum={getTabNum}
getTabName={getTabName}
showSingleTab={showSingleTab}
@ -289,7 +269,9 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
resourceListNow={resourceListNow}
player={player}
startedAt={startedAt}
// @ts-ignore
websocketList={websocketList}
// @ts-ignore
websocketListNow={websocketListNow}
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
@ -298,35 +280,12 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
);
}
const useInfiniteScroll = (loadMoreCallback: () => void, hasMore: boolean) => {
const observerRef = useRef<IntersectionObserver>(null);
const loadingRef = useRef<HTMLDivElement>(null);
useEffect(() => {
const observer = new IntersectionObserver(
(entries) => {
if (entries[0]?.isIntersecting && hasMore) {
loadMoreCallback();
}
},
{ threshold: 0.1 },
);
if (loadingRef.current) {
observer.observe(loadingRef.current);
}
// @ts-ignore
observerRef.current = observer;
return () => {
if (observerRef.current) {
observerRef.current.disconnect();
}
};
}, [loadMoreCallback, hasMore, loadingRef]);
return loadingRef;
type WSMessage = Timed & {
channelName: string;
data: string;
timestamp: number;
dir: 'up' | 'down';
messageType: string;
};
interface Props {
@ -343,8 +302,8 @@ interface Props {
resourceList: Timed[];
fetchListNow: Timed[];
resourceListNow: Timed[];
websocketList: Array<WsChannel>;
websocketListNow: Array<WsChannel>;
websocketList: Array<WSMessage>;
websocketListNow: Array<WSMessage>;
player: WebPlayer | MobilePlayer;
startedAt: number;
isMobile?: boolean;
@ -390,189 +349,107 @@ export const NetworkPanelComp = observer(
>(null);
const { showModal } = useModal();
const [showOnlyErrors, setShowOnlyErrors] = useState(false);
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false);
const [isLoading, setIsLoading] = useState(true);
const [isProcessing, setIsProcessing] = useState(false);
const [displayedItems, setDisplayedItems] = useState([]);
const [totalItems, setTotalItems] = useState(0);
const [summaryStats, setSummaryStats] = useState({
resourcesSize: 0,
transferredSize: 0,
});
const originalListRef = useRef([]);
const socketListRef = useRef([]);
const {
sessionStore: { devTools },
} = useStore();
const { filter } = devTools[INDEX_KEY];
const { activeTab } = devTools[INDEX_KEY];
const activeIndex = activeOutsideIndex ?? devTools[INDEX_KEY].index;
const [inputFilterValue, setInputFilterValue] = useState(filter);
const debouncedFilter = useCallback(
debounceCall((filterValue) => {
devTools.update(INDEX_KEY, { filter: filterValue });
}, 300),
[],
const socketList = useMemo(
() =>
websocketList.filter(
(ws, i, arr) =>
arr.findIndex((it) => it.channelName === ws.channelName) === i,
),
[websocketList],
);
// Process socket lists once
useEffect(() => {
const uniqueSocketList = websocketList.filter(
(ws, i, arr) =>
arr.findIndex((it) => it.channelName === ws.channelName) === i,
);
socketListRef.current = uniqueSocketList;
}, [websocketList.length]);
// Initial data processing - do this only once when data changes
useEffect(() => {
setIsLoading(true);
// Heaviest operation here, will create a final merged network list
const processData = async () => {
const fetchUrls = new Set(
fetchList.map((ft) => {
return `${ft.name}-${Math.floor(ft.time / 100)}-${Math.floor(ft.duration / 100)}`;
}),
);
// We want to get resources that aren't in fetch list
const filteredResources = await processInChunks(resourceList, (chunk) =>
chunk.filter((res: any) => {
const key = `${res.name}-${Math.floor(res.time / 100)}-${Math.floor(res.duration / 100)}`;
return !fetchUrls.has(key);
}),
BATCH_SIZE,
25,
);
const processedSockets = socketListRef.current.map((ws: any) => ({
...ws,
type: 'websocket',
method: 'ws',
url: ws.channelName,
name: ws.channelName,
status: '101',
duration: 0,
transferredBodySize: 0,
}));
const mergedList: Timed[] = mergeListsWithZoom(
filteredResources as Timed[],
fetchList,
processedSockets as Timed[],
{ enabled: Boolean(zoomEnabled), start: zoomStartTs ?? 0, end: zoomEndTs ?? 0 }
)
originalListRef.current = mergedList;
setTotalItems(mergedList.length);
calculateResourceStats(resourceList);
// Only display initial chunk
setDisplayedItems(mergedList.slice(0, INITIAL_LOAD_SIZE));
setIsLoading(false);
};
void processData();
}, [
resourceList.length,
fetchList.length,
socketListRef.current.length,
zoomEnabled,
zoomStartTs,
zoomEndTs,
]);
const calculateResourceStats = (resourceList: Record<string, any>) => {
setTimeout(() => {
let resourcesSize = 0
let transferredSize = 0
resourceList.forEach(({ decodedBodySize, headerSize, encodedBodySize }: any) => {
resourcesSize += decodedBodySize || 0
transferredSize += (headerSize || 0) + (encodedBodySize || 0)
})
setSummaryStats({
resourcesSize,
transferredSize,
});
}, 0);
}
useEffect(() => {
if (originalListRef.current.length === 0) return;
setIsProcessing(true);
const applyFilters = async () => {
let filteredItems: any[] = originalListRef.current;
filteredItems = await processInChunks(filteredItems, (chunk) =>
chunk.filter(
(it) => {
let valid = true;
if (showOnlyErrors) {
valid = parseInt(it.status) >= 400 || !it.success || it.error
}
if (filter) {
try {
const regex = new RegExp(filter, 'i');
valid = valid && regex.test(it.status) || regex.test(it.name) || regex.test(it.type) || regex.test(it.method);
} catch (e) {
valid = valid && String(it.status).includes(filter) || it.name.includes(filter) || it.type.includes(filter) || (it.method && it.method.includes(filter));
const list = useMemo(
() =>
// TODO: better merge (with body size info) - do it in player
resourceList
.filter(
(res) =>
!fetchList.some((ft) => {
// res.url !== ft.url doesn't work on relative URLs appearing within fetchList (to-fix in player)
if (res.name === ft.name) {
if (res.time === ft.time) return true;
if (res.url.includes(ft.url)) {
return (
Math.abs(res.time - ft.time) < 350 ||
Math.abs(res.timestamp - ft.timestamp) < 350
);
}
}
}
if (activeTab !== ALL) {
valid = valid && TYPE_TO_TAB[it.type] === activeTab;
}
return valid;
},
),
);
if (res.name !== ft.name) {
return false;
}
if (Math.abs(res.time - ft.time) > 250) {
return false;
} // TODO: find good epsilons
if (Math.abs(res.duration - ft.duration) > 200) {
return false;
}
// Update displayed items
setDisplayedItems(filteredItems.slice(0, INITIAL_LOAD_SIZE));
setTotalItems(filteredItems.length);
setIsProcessing(false);
};
return true;
}),
)
.concat(fetchList)
.concat(
socketList.map((ws) => ({
...ws,
type: 'websocket',
method: 'ws',
url: ws.channelName,
name: ws.channelName,
status: '101',
duration: 0,
transferredBodySize: 0,
})),
)
.filter((req) =>
zoomEnabled
? req.time >= zoomStartTs! && req.time <= zoomEndTs!
: true,
)
.sort((a, b) => a.time - b.time),
[resourceList.length, fetchList.length, socketList.length],
);
void applyFilters();
}, [filter, activeTab, showOnlyErrors]);
let filteredList = useMemo(() => {
if (!showOnlyErrors) {
return list;
}
return list.filter(
(it) => parseInt(it.status) >= 400 || !it.success || it.error,
);
}, [showOnlyErrors, list]);
filteredList = useRegExListFilterMemo(
filteredList,
(it) => [it.status, it.name, it.type, it.method],
filter,
);
filteredList = useTabListFilterMemo(
filteredList,
(it) => TYPE_TO_TAB[it.type],
ALL,
activeTab,
);
const loadMoreItems = useCallback(() => {
if (isProcessing) return;
setIsProcessing(true);
setTimeout(() => {
setDisplayedItems((prevItems) => {
const currentLength = prevItems.length;
const newItems = originalListRef.current.slice(
currentLength,
currentLength + BATCH_SIZE,
);
return [...prevItems, ...newItems];
});
setIsProcessing(false);
}, 10);
}, [isProcessing]);
const hasMoreItems = displayedItems.length < totalItems;
const loadingRef = useInfiniteScroll(loadMoreItems, hasMoreItems);
const onTabClick = (activeTab) => {
const onTabClick = (activeTab: (typeof TAP_KEYS)[number]) =>
devTools.update(INDEX_KEY, { activeTab });
};
const onFilterChange = ({ target: { value } }) => {
setInputFilterValue(value)
debouncedFilter(value);
};
const onFilterChange = ({
target: { value },
}: React.ChangeEvent<HTMLInputElement>) =>
devTools.update(INDEX_KEY, { filter: value });
// AutoScroll
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
displayedItems,
filteredList,
getLastItemTime(fetchListNow, resourceListNow),
activeIndex,
(index) => devTools.update(INDEX_KEY, { index }),
@ -585,6 +462,24 @@ export const NetworkPanelComp = observer(
timeoutStartAutoscroll();
};
const resourcesSize = useMemo(
() =>
resourceList.reduce(
(sum, { decodedBodySize }) => sum + (decodedBodySize || 0),
0,
),
[resourceList.length],
);
const transferredSize = useMemo(
() =>
resourceList.reduce(
(sum, { headerSize, encodedBodySize }) =>
sum + (headerSize || 0) + (encodedBodySize || 0),
0,
),
[resourceList.length],
);
const referenceLines = useMemo(() => {
const arr = [];
@ -618,7 +513,7 @@ export const NetworkPanelComp = observer(
isSpot={isSpot}
time={item.time + startedAt}
resource={item}
rows={displayedItems}
rows={filteredList}
fetchPresented={fetchList.length > 0}
/>,
{
@ -630,10 +525,12 @@ export const NetworkPanelComp = observer(
},
},
);
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
stopAutoscroll();
};
const tableCols = useMemo(() => {
const cols = [
const tableCols = React.useMemo(() => {
const cols: any[] = [
{
label: t('Status'),
dataKey: 'status',
@ -688,7 +585,7 @@ export const NetworkPanelComp = observer(
});
}
return cols;
}, [showSingleTab, activeTab, t, getTabName, getTabNum, isSpot]);
}, [showSingleTab]);
return (
<BottomBlock
@ -720,7 +617,7 @@ export const NetworkPanelComp = observer(
name="filter"
onChange={onFilterChange}
width={280}
value={inputFilterValue}
value={filter}
size="small"
prefix={<SearchOutlined className="text-neutral-400" />}
/>
@ -728,7 +625,7 @@ export const NetworkPanelComp = observer(
</BottomBlock.Header>
<BottomBlock.Content>
<div className="flex items-center justify-between px-4 border-b bg-teal/5 h-8">
<div className="flex items-center">
<div>
<Form.Item name="show-errors-only" className="mb-0">
<label
style={{
@ -745,29 +642,21 @@ export const NetworkPanelComp = observer(
<span className="text-sm ms-2">4xx-5xx Only</span>
</label>
</Form.Item>
{isProcessing && (
<span className="text-xs text-gray-500 ml-4">
Processing data...
</span>
)}
</div>
<InfoLine>
<InfoLine.Point label={`${totalItems}`} value="requests" />
<InfoLine.Point
label={`${displayedItems.length}/${totalItems}`}
value="displayed"
display={displayedItems.length < totalItems}
label={`${filteredList.length}`}
value=" requests"
/>
<InfoLine.Point
label={formatBytes(summaryStats.transferredSize)}
label={formatBytes(transferredSize)}
value="transferred"
display={summaryStats.transferredSize > 0}
display={transferredSize > 0}
/>
<InfoLine.Point
label={formatBytes(summaryStats.resourcesSize)}
label={formatBytes(resourcesSize)}
value="resources"
display={summaryStats.resourcesSize > 0}
display={resourcesSize > 0}
/>
<InfoLine.Point
label={formatMs(domBuildingTime)}
@ -790,67 +679,42 @@ export const NetworkPanelComp = observer(
/>
</InfoLine>
</div>
{isLoading ? (
<div className="flex items-center justify-center h-full">
<div className="text-center">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-gray-900 mx-auto mb-2"></div>
<p>Processing initial network data...</p>
<NoContent
title={
<div className="capitalize flex items-center gap-2">
<InfoCircleOutlined size={18} />
{t('No Data')}
</div>
</div>
) : (
<NoContent
title={
<div className="capitalize flex items-center gap-2">
<InfoCircleOutlined size={18} />
{t('No Data')}
</div>
}
size="small"
show={displayedItems.length === 0}
}
size="small"
show={filteredList.length === 0}
>
{/* @ts-ignore */}
<TimeTable
rows={filteredList}
tableHeight={panelHeight - 102}
referenceLines={referenceLines}
renderPopup
onRowClick={showDetailsModal}
sortBy="time"
sortAscending
onJump={(row: any) => {
devTools.update(INDEX_KEY, {
index: filteredList.indexOf(row),
});
player.jump(row.time);
}}
activeIndex={activeIndex}
>
<div>
<TimeTable
rows={displayedItems}
tableHeight={panelHeight - 102 - (hasMoreItems ? 30 : 0)}
referenceLines={referenceLines}
renderPopup
onRowClick={showDetailsModal}
sortBy="time"
sortAscending
onJump={(row) => {
devTools.update(INDEX_KEY, {
index: displayedItems.indexOf(row),
});
player.jump(row.time);
}}
activeIndex={activeIndex}
>
{tableCols}
</TimeTable>
{hasMoreItems && (
<div
ref={loadingRef}
className="flex justify-center items-center text-xs text-gray-500"
>
<div className="flex items-center">
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-gray-600 mr-2"></div>
Loading more data ({totalItems - displayedItems.length}{' '}
remaining)
</div>
</div>
)}
</div>
{selectedWsChannel ? (
<WSPanel
socketMsgList={selectedWsChannel}
onClose={() => setSelectedWsChannel(null)}
/>
) : null}
</NoContent>
)}
{tableCols}
</TimeTable>
{selectedWsChannel ? (
<WSPanel
socketMsgList={selectedWsChannel}
onClose={() => setSelectedWsChannel(null)}
/>
) : null}
</NoContent>
</BottomBlock.Content>
</BottomBlock>
);
@ -858,6 +722,7 @@ export const NetworkPanelComp = observer(
);
const WebNetworkPanel = observer(NetworkPanelCont);
const MobileNetworkPanel = observer(MobileNetworkPanelCont);
export { WebNetworkPanel, MobileNetworkPanel };

View file

@ -1,178 +0,0 @@
/**
 * Merges three lists (each sorted ascending by `time`) into a single sorted
 * list, optionally restricted to a zoom window on each item's `time` field.
 *
 * @param arr1 - first sorted list
 * @param arr2 - second sorted list
 * @param arr3 - third sorted list
 * @param zoom - optional window; when `enabled`, only items with
 *               `time` in [zoom.start, zoom.end] are kept
 * @returns one merged, time-sorted array of items from all three inputs
 */
export function mergeListsWithZoom<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(
  arr1: T[],
  arr2: Y[],
  arr3: Z[],
  zoom?: { enabled: boolean; start: number; end: number },
): Array<T | Y | Z> {
  // Nothing to merge at all.
  if (!arr1.length && !arr2.length && !arr3.length) {
    return [];
  }

  // Common fast path: no zoom window — plain three-way merge.
  if (!zoom?.enabled) {
    return mergeThreeSortedArrays(arr1, arr2, arr3);
  }

  // Locate the first in-window element of each array via binary search
  // (faster than a linear scan on large lists).
  const firstIdx1 = binarySearchStartIndex(arr1, zoom.start);
  const firstIdx2 = binarySearchStartIndex(arr2, zoom.start);
  const firstIdx3 = binarySearchStartIndex(arr3, zoom.start);

  // Merge only the portions of each array that fall inside the window.
  return mergeThreeSortedArraysWithinRange(
    arr1,
    arr2,
    arr3,
    firstIdx1,
    firstIdx2,
    firstIdx3,
    zoom.start,
    zoom.end,
  );
}
/**
 * Lower-bound search: returns the index of the first element whose `time`
 * is >= `threshold`, or `arr.length` when every element is below it.
 * Assumes `arr` is sorted ascending by `time`.
 */
function binarySearchStartIndex<T extends Record<string, any>>(
  arr: T[],
  threshold: number,
): number {
  if (arr.length === 0) return 0;

  // Fast paths: threshold outside the array's time range.
  if (arr[arr.length - 1].time < threshold) return arr.length;
  if (arr[0].time >= threshold) return 0;

  let lo = 0;
  let hi = arr.length - 1;
  while (lo <= hi) {
    const mid = (lo + hi) >> 1; // indices are non-negative ints, so >>1 == floor(/2)
    if (arr[mid].time < threshold) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }
  return lo;
}
/**
 * Classic three-way merge of arrays sorted ascending by `time`.
 * On ties, arr1 items come before arr2 items, which come before arr3 items.
 */
function mergeThreeSortedArrays<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(arr1: T[], arr2: Y[], arr3: Z[]): Array<T | Y | Z> {
  // Preallocate: the merged length is known up front.
  const merged: Array<T | Y | Z> = new Array(
    arr1.length + arr2.length + arr3.length,
  );

  let i = 0;
  let j = 0;
  let k = 0;
  let out = 0;

  while (i < arr1.length || j < arr2.length || k < arr3.length) {
    // Exhausted arrays contribute Infinity so they never win the comparison.
    const t1 = i < arr1.length ? arr1[i].time : Infinity;
    const t2 = j < arr2.length ? arr2[j].time : Infinity;
    const t3 = k < arr3.length ? arr3[k].time : Infinity;

    if (t1 <= t2 && t1 <= t3) {
      merged[out++] = arr1[i++];
    } else if (t2 <= t3) {
      merged[out++] = arr2[j++];
    } else {
      merged[out++] = arr3[k++];
    }
  }

  return merged;
}
/**
 * Zoom-aware variant of mergeThreeSortedArrays: merging begins at the
 * precomputed start indexes (first in-window element of each array) and
 * stops as soon as every remaining item lies past `end`.
 *
 * @param startIdx1/2/3 - first index of each array with time >= `start`
 * @param start - window start (already applied via the start indexes; kept
 *                in the signature for symmetry with the caller)
 * @param end - inclusive upper bound on item `time`
 */
function mergeThreeSortedArraysWithinRange<
  T extends Record<string, any>,
  Y extends Record<string, any>,
  Z extends Record<string, any>,
>(
  arr1: T[],
  arr2: Y[],
  arr3: Z[],
  startIdx1: number,
  startIdx2: number,
  startIdx3: number,
  start: number,
  end: number,
): Array<T | Y | Z> {
  // Output length is unknown in advance, so grow dynamically.
  const merged: Array<T | Y | Z> = [];

  let i = startIdx1;
  let j = startIdx2;
  let k = startIdx3;

  while (i < arr1.length || j < arr2.length || k < arr3.length) {
    const t1 = i < arr1.length ? arr1[i].time : Infinity;
    const t2 = j < arr2.length ? arr2[j].time : Infinity;
    const t3 = k < arr3.length ? arr3[k].time : Infinity;

    // All remaining items are beyond the window — done.
    if (Math.min(t1, t2, t3) > end) {
      break;
    }

    if (t1 <= t2 && t1 <= t3) {
      if (t1 <= end) merged.push(arr1[i]);
      i++;
    } else if (t2 <= t3) {
      if (t2 <= end) merged.push(arr2[j]);
      j++;
    } else {
      if (t3 <= end) merged.push(arr3[k]);
      k++;
    }
  }

  return merged;
}
/**
 * Applies `processFn` to `items` chunk by chunk, yielding to the event loop
 * between chunks (via setTimeout) so large lists don't block the UI thread.
 * Resolves with the concatenation of every chunk's result.
 *
 * Fix: previously each chunk was sliced as `chunkSize + overscan` items but
 * the cursor only advanced by `chunkSize`, so the trailing `overscan` items
 * were processed by two consecutive chunks and appeared TWICE in the result.
 * Each item is now processed exactly once; `overscan` simply enlarges the
 * effective chunk size.
 *
 * @param items - full input list
 * @param processFn - receives one chunk (an array slice) and returns the
 *                    processed results for that chunk
 * @param chunkSize - base number of items per chunk (default 1000)
 * @param overscan - extra items folded into each chunk (default 0)
 * @returns promise resolving to all chunk results, in input order
 */
export function processInChunks<T, R>(
  items: T[],
  processFn: (chunk: T[]) => R[],
  chunkSize = 1000,
  overscan = 0,
): Promise<R[]> {
  return new Promise((resolve) => {
    if (items.length === 0) {
      resolve([]);
      return;
    }

    // Advance by the full slice width so no item is visited twice.
    const step = chunkSize + overscan;
    let result: R[] = [];
    let index = 0;

    const processNextChunk = () => {
      const chunk = items.slice(index, index + step);
      result = result.concat(processFn(chunk));
      index += step;
      if (index < items.length) {
        // Defer the next chunk so pending UI work can run in between.
        setTimeout(processNextChunk, 0);
      } else {
        resolve(result);
      }
    };

    processNextChunk();
  });
}

View file

@ -18,7 +18,7 @@ function DocCard(props: Props) {
} = props;
return (
<div className={cn('p-5 bg-gray-lightest mb-4 rounded-lg', className)}>
<div className={cn('p-5 bg-gray-lightest mb-4 rounded', className)}>
<div className="font-medium mb-2 flex items-center">
{props.icon && (
<div

View file

@ -5,7 +5,6 @@ import cn from 'classnames';
import { Loader } from 'UI';
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
import { useTranslation } from 'react-i18next';
import { VList } from 'virtua';
function TruncatedText({
text,
@ -125,7 +124,7 @@ export function AutocompleteModal({
if (index === blocksAmount - 1 && blocksAmount > 1) {
str += ' and ';
}
str += block.trim();
str += `"${block.trim()}"`;
if (index < blocksAmount - 2) {
str += ', ';
}
@ -171,27 +170,25 @@ export function AutocompleteModal({
<>
<div
className="flex flex-col gap-2 overflow-y-auto py-2 overflow-x-hidden text-ellipsis"
style={{ height: Math.min(sortedOptions.length * 32, 240) }}
style={{ maxHeight: 200 }}
>
<VList count={sortedOptions.length} itemSize={18}>
{sortedOptions.map((item) => (
<div
key={item.value}
onClick={() => onSelectOption(item)}
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
>
<Checkbox checked={isSelected(item)} /> {item.label}
</div>
))}
</VList>
{sortedOptions.map((item) => (
<div
key={item.value}
onClick={() => onSelectOption(item)}
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
>
<Checkbox checked={isSelected(item)} /> {item.label}
</div>
))}
</div>
{query.length ? (
<div className="border-y border-y-gray-light py-2">
<div
className="whitespace-nowrap truncate w-full rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
className="whitespace-normal rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
onClick={applyQuery}
>
{t('Apply')}&nbsp;<span className='font-semibold'>{queryStr}</span>
{t('Apply')}&nbsp;{queryStr}
</div>
</div>
) : null}

View file

@ -128,10 +128,8 @@ const FilterAutoComplete = observer(
};
const handleFocus = () => {
if (!initialFocus) {
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
}
setInitialFocus(true);
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
};
return (

View file

@ -9,10 +9,8 @@ function LiveSessionSearch() {
const appliedFilter = searchStoreLive.instance;
useEffect(() => {
if (projectsStore.activeSiteId) {
void searchStoreLive.fetchSessions(true);
}
}, [projectsStore.activeSiteId])
void searchStoreLive.fetchSessions();
}, []);
const onAddFilter = (filter: any) => {
filter.autoOpen = true;

View file

@ -53,6 +53,9 @@ function SessionFilters() {
onBeforeLoad: async () => {
await reloadTags();
},
onLoaded: () => {
debounceFetch = debounce(() => searchStore.fetchSessions(), 500);
}
});
const onAddFilter = (filter: any) => {

View file

@ -19,13 +19,11 @@ export default function MetaItem(props: Props) {
<TextEllipsis
text={label}
className="p-0"
maxWidth={'300px'}
popupProps={{ size: 'small', disabled: true }}
/>
<span className="bg-neutral-200 inline-block w-[1px] min-h-[17px]"></span>
<TextEllipsis
text={value}
maxWidth={'350px'}
className="p-0 text-neutral-500"
popupProps={{ size: 'small', disabled: true }}
/>

View file

@ -7,15 +7,13 @@ interface Props {
className?: string;
metaList: any[];
maxLength?: number;
onMetaClick?: (meta: { name: string, value: string }) => void;
horizontal?: boolean;
}
export default function SessionMetaList(props: Props) {
const { className = '', metaList, maxLength = 14, horizontal = false } = props;
const { className = '', metaList, maxLength = 14 } = props;
return (
<div className={cn('flex items-center gap-1', horizontal ? '' : 'flex-wrap', className)}>
<div className={cn('flex items-center flex-wrap gap-1', className)}>
{metaList.slice(0, maxLength).map(({ label, value }, index) => (
<React.Fragment key={index}>
<MetaItem label={label} value={`${value}`} />

View file

@ -5,7 +5,6 @@ import ListingVisibility from './components/ListingVisibility';
import DefaultPlaying from './components/DefaultPlaying';
import DefaultTimezone from './components/DefaultTimezone';
import CaptureRate from './components/CaptureRate';
import { useTranslation } from 'react-i18next';
function SessionSettings() {

Some files were not shown because too many files have changed in this diff Show more