Compare commits
110 commits
assist_rem
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
90510aa33b | ||
|
|
96a70f5d41 | ||
|
|
d4a13edcf0 | ||
|
|
51fad91a22 | ||
|
|
36abcda1e1 | ||
|
|
dd5f464f73 | ||
|
|
f9ada41272 | ||
|
|
9e24a3583e | ||
|
|
0a3129d3cd | ||
|
|
99d61db9d9 | ||
|
|
133958622e | ||
|
|
fb021f606f | ||
|
|
a2905fa8ed | ||
|
|
beec2283fd | ||
|
|
6c8b55019e | ||
|
|
e3e3e11227 | ||
|
|
c6f7de04cc | ||
|
|
2921c17cbf | ||
|
|
7eb3f5c4c8 | ||
|
|
5a9a8e588a | ||
|
|
4b14258266 | ||
|
|
744d2d4311 | ||
|
|
64242a5dc0 | ||
|
|
cae3002697 | ||
|
|
3d3c62196b | ||
|
|
e810958a5d | ||
|
|
39fa9787d1 | ||
|
|
c9c1ad4dde | ||
|
|
d9868928be | ||
|
|
a460d8c9a2 | ||
|
|
930417aab4 | ||
|
|
07bc184f4d | ||
|
|
71b7cca569 | ||
|
|
355d27eaa0 | ||
|
|
66b485cccf | ||
|
|
de33a42151 | ||
|
|
f12bdebf82 | ||
|
|
bbfa20c693 | ||
|
|
f264ba043d | ||
|
|
a05dce8125 | ||
|
|
3a1635d81f | ||
|
|
ccb332c636 | ||
|
|
80ffa15959 | ||
|
|
b2e961d621 | ||
|
|
b4d0598f23 | ||
|
|
e77f083f10 | ||
|
|
58da1d3f64 | ||
|
|
447fc26a2a | ||
|
|
9bdf6e4f92 | ||
|
|
01f403e12d | ||
|
|
39eb943b86 | ||
|
|
366b0d38b0 | ||
|
|
f4d5b3c06e | ||
|
|
93ae18133e | ||
|
|
fbe5d78270 | ||
|
|
b803eed1d4 | ||
|
|
9ed3cb1b7e | ||
|
|
5e0e5730ba | ||
|
|
d78b33dcd2 | ||
|
|
4b1ca200b4 | ||
|
|
08d930f9ff | ||
|
|
da37809bc8 | ||
|
|
d922fc7ad5 | ||
|
|
796360fdd2 | ||
|
|
13dbb60d8b | ||
|
|
9e20a49128 | ||
|
|
91f8cc1399 | ||
|
|
f8ba3f6d89 | ||
|
|
85e30b3692 | ||
|
|
0360e3726e | ||
|
|
77bbb5af36 | ||
|
|
ab0d4cfb62 | ||
|
|
3fd506a812 | ||
|
|
e8432e2dec | ||
|
|
5c76a8524c | ||
|
|
3ba40a4811 | ||
|
|
f9a3f24590 | ||
|
|
85d6d0abac | ||
|
|
b3594136ce | ||
|
|
8f67edde8d | ||
|
|
74ed29915b | ||
|
|
3ca71ec211 | ||
|
|
0e469fd056 | ||
|
|
a8cb0e1643 | ||
|
|
e171f0d8d5 | ||
|
|
68ea291444 | ||
|
|
05cbb831c7 | ||
|
|
5070ded1f4 | ||
|
|
77610a4924 | ||
|
|
7c34e4a0f6 | ||
|
|
330e21183f | ||
|
|
30ce37896c | ||
|
|
80a7817e7d | ||
|
|
1b9c568cb1 | ||
|
|
3759771ae9 | ||
|
|
f6ae5aba88 | ||
|
|
5190dc512a | ||
|
|
3fcccb51e8 | ||
|
|
26077d5689 | ||
|
|
00c57348fd | ||
|
|
1f9bc5520a | ||
|
|
aef94618f6 | ||
|
|
2a330318c7 | ||
|
|
6777d5ce2a | ||
|
|
8a6f8fe91f | ||
|
|
7b078fed4c | ||
|
|
894d4c84b3 | ||
|
|
46390a3ba9 | ||
|
|
621667f5ce | ||
|
|
a72f476f1c |
142 changed files with 3776 additions and 1419 deletions
122
.github/workflows/assist-server-ee.yaml
vendored
Normal file
122
.github/workflows/assist-server-ee.yaml
vendored
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
# This action will push the assist changes to aws
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
skip_security_checks:
|
||||
description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
|
||||
required: false
|
||||
default: "false"
|
||||
push:
|
||||
branches:
|
||||
- dev
|
||||
paths:
|
||||
- "ee/assist-server/**"
|
||||
|
||||
name: Build and Deploy Assist-Server EE
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Deploy
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
# We need to diff with old commit
|
||||
# to see which workers got changed.
|
||||
fetch-depth: 2
|
||||
|
||||
- uses: ./.github/composite-actions/update-keys
|
||||
with:
|
||||
assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
|
||||
assist_key: ${{ secrets.ASSIST_KEY }}
|
||||
domain_name: ${{ secrets.EE_DOMAIN_NAME }}
|
||||
jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
|
||||
jwt_secret: ${{ secrets.EE_JWT_SECRET }}
|
||||
jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
|
||||
jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
|
||||
license_key: ${{ secrets.EE_LICENSE_KEY }}
|
||||
minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
|
||||
minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
|
||||
pg_password: ${{ secrets.EE_PG_PASSWORD }}
|
||||
registry_url: ${{ secrets.OSS_REGISTRY_URL }}
|
||||
name: Update Keys
|
||||
|
||||
- name: Docker login
|
||||
run: |
|
||||
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
|
||||
|
||||
- uses: azure/k8s-set-context@v1
|
||||
with:
|
||||
method: kubeconfig
|
||||
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
|
||||
id: setcontext
|
||||
|
||||
- name: Building and Pushing Assist-Server image
|
||||
id: build-image
|
||||
env:
|
||||
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
|
||||
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
|
||||
ENVIRONMENT: staging
|
||||
run: |
|
||||
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
|
||||
cd assist-server
|
||||
PUSH_IMAGE=0 bash -x ./build.sh ee
|
||||
[[ "x$skip_security_checks" == "xtrue" ]] || {
|
||||
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
|
||||
images=("assist-server")
|
||||
for image in ${images[*]};do
|
||||
./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
|
||||
done
|
||||
err_code=$?
|
||||
[[ $err_code -ne 0 ]] && {
|
||||
exit $err_code
|
||||
}
|
||||
} && {
|
||||
echo "Skipping Security Checks"
|
||||
}
|
||||
images=("assist-server")
|
||||
for image in ${images[*]};do
|
||||
docker push $DOCKER_REPO/$image:$IMAGE_TAG
|
||||
done
|
||||
- name: Creating old image input
|
||||
run: |
|
||||
#
|
||||
# Create yaml with existing image tags
|
||||
#
|
||||
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
|
||||
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
|
||||
|
||||
echo > /tmp/image_override.yaml
|
||||
|
||||
for line in `cat /tmp/image_tag.txt`;
|
||||
do
|
||||
image_array=($(echo "$line" | tr ':' '\n'))
|
||||
cat <<EOF >> /tmp/image_override.yaml
|
||||
${image_array[0]}:
|
||||
image:
|
||||
# We've to strip off the -ee, as helm will append it.
|
||||
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
|
||||
EOF
|
||||
done
|
||||
- name: Deploy to kubernetes
|
||||
run: |
|
||||
pwd
|
||||
cd scripts/helmcharts/
|
||||
|
||||
# Update changed image tag
|
||||
sed -i "/assist-server/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
|
||||
|
||||
cat /tmp/image_override.yaml
|
||||
# Deploy command
|
||||
mkdir -p /tmp/charts
|
||||
mv openreplay/charts/{ingress-nginx,assist-server,quickwit,connector} /tmp/charts/
|
||||
rm -rf openreplay/charts/*
|
||||
mv /tmp/charts/* openreplay/charts/
|
||||
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
|
||||
env:
|
||||
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
|
||||
# We're not passing -ee flag, because helm will add that.
|
||||
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
|
||||
ENVIRONMENT: staging
|
||||
189
.github/workflows/patch-build-old.yaml
vendored
Normal file
189
.github/workflows/patch-build-old.yaml
vendored
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
services:
|
||||
description: 'Comma separated names of services to build(in small letters).'
|
||||
required: true
|
||||
default: 'chalice,frontend'
|
||||
tag:
|
||||
description: 'Tag to update.'
|
||||
required: true
|
||||
type: string
|
||||
branch:
|
||||
description: 'Branch to build patches from. Make sure the branch is uptodate with tag. Else itll cause missing commits.'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
name: Build patches from tag, rewrite commit HEAD to older timestamp, and Push the tag
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Build Patch from old tag
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
|
||||
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 4
|
||||
ref: ${{ github.event.inputs.tag }}
|
||||
|
||||
- name: Set Remote with GITHUB_TOKEN
|
||||
run: |
|
||||
git config --unset http.https://github.com/.extraheader
|
||||
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
|
||||
|
||||
- name: Create backup tag with timestamp
|
||||
run: |
|
||||
set -e # Exit immediately if a command exits with a non-zero status
|
||||
TIMESTAMP=$(date +%Y%m%d%H%M%S)
|
||||
BACKUP_TAG="${{ github.event.inputs.tag }}-backup-${TIMESTAMP}"
|
||||
echo "BACKUP_TAG=${BACKUP_TAG}" >> $GITHUB_ENV
|
||||
echo "INPUT_TAG=${{ github.event.inputs.tag }}" >> $GITHUB_ENV
|
||||
git tag $BACKUP_TAG || { echo "Failed to create backup tag"; exit 1; }
|
||||
git push origin $BACKUP_TAG || { echo "Failed to push backup tag"; exit 1; }
|
||||
echo "Created backup tag: $BACKUP_TAG"
|
||||
|
||||
# Get the oldest commit date from the last 3 commits in raw format
|
||||
OLDEST_COMMIT_TIMESTAMP=$(git log -3 --pretty=format:"%at" | tail -1)
|
||||
echo "Oldest commit timestamp: $OLDEST_COMMIT_TIMESTAMP"
|
||||
# Add 1 second to the timestamp
|
||||
NEW_TIMESTAMP=$((OLDEST_COMMIT_TIMESTAMP + 1))
|
||||
echo "NEW_TIMESTAMP=$NEW_TIMESTAMP" >> $GITHUB_ENV
|
||||
|
||||
|
||||
- name: Setup yq
|
||||
uses: mikefarah/yq@master
|
||||
|
||||
# Configure AWS credentials for the first registry
|
||||
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
|
||||
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}
|
||||
|
||||
- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
|
||||
id: login-ecr-arm
|
||||
run: |
|
||||
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
|
||||
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
|
||||
|
||||
- uses: depot/setup-action@v1
|
||||
- name: Get HEAD Commit ID
|
||||
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
- name: Define Branch Name
|
||||
run: echo "BRANCH_NAME=${{inputs.branch}}" >> $GITHUB_ENV
|
||||
|
||||
- name: Build
|
||||
id: build-image
|
||||
env:
|
||||
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
|
||||
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
|
||||
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
|
||||
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
|
||||
MSAAS_REPO_FOLDER: /tmp/msaas
|
||||
run: |
|
||||
set -exo pipefail
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b $BRANCH_NAME
|
||||
working_dir=$(pwd)
|
||||
function image_version(){
|
||||
local service=$1
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
current_version=$(yq eval '.AppVersion' $chart_path)
|
||||
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
|
||||
echo $new_version
|
||||
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
|
||||
}
|
||||
function clone_msaas() {
|
||||
[ -d $MSAAS_REPO_FOLDER ] || {
|
||||
git clone -b $INPUT_TAG --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
cd openreplay && git fetch origin && git checkout $INPUT_TAG
|
||||
git log -1
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
}
|
||||
}
|
||||
function build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
echo building managed
|
||||
clone_msaas
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/api
|
||||
else
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/$service
|
||||
fi
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
|
||||
}
|
||||
# Checking for backend images
|
||||
ls backend/cmd >> /tmp/backend.txt
|
||||
echo Services: "${{ github.event.inputs.services }}"
|
||||
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
|
||||
BUILD_SCRIPT_NAME="build.sh"
|
||||
# Build FOSS
|
||||
for SERVICE in "${SERVICES[@]}"; do
|
||||
# Check if service is backend
|
||||
if grep -q $SERVICE /tmp/backend.txt; then
|
||||
cd backend
|
||||
foss_build_args="nil $SERVICE"
|
||||
ee_build_args="ee $SERVICE"
|
||||
else
|
||||
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
|
||||
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
version=$(image_version $SERVICE)
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
else
|
||||
build_managed $SERVICE $version
|
||||
fi
|
||||
cd $working_dir
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
|
||||
yq eval ".AppVersion = \"$version\"" -i $chart_path
|
||||
git add $chart_path
|
||||
git commit -m "Increment $SERVICE chart version"
|
||||
done
|
||||
|
||||
- name: Change commit timestamp
|
||||
run: |
|
||||
# Convert the timestamp to a date format git can understand
|
||||
NEW_DATE=$(perl -le 'print scalar gmtime($ARGV[0])." +0000"' $NEW_TIMESTAMP)
|
||||
echo "Setting commit date to: $NEW_DATE"
|
||||
|
||||
# Amend the commit with the new date
|
||||
GIT_COMMITTER_DATE="$NEW_DATE" git commit --amend --no-edit --date="$NEW_DATE"
|
||||
|
||||
# Verify the change
|
||||
git log -1 --pretty=format:"Commit now dated: %cD"
|
||||
|
||||
# git tag and push
|
||||
git tag $INPUT_TAG -f
|
||||
git push origin $INPUT_TAG -f
|
||||
|
||||
|
||||
# - name: Debug Job
|
||||
# if: ${{ failure() }}
|
||||
# uses: mxschmitt/action-tmate@v3
|
||||
# env:
|
||||
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
|
||||
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
|
||||
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
|
||||
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
|
||||
# MSAAS_REPO_FOLDER: /tmp/msaas
|
||||
# with:
|
||||
# limit-access-to-actor: true
|
||||
246
.github/workflows/patch-build.yaml
vendored
246
.github/workflows/patch-build.yaml
vendored
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
|
||||
inputs:
|
||||
services:
|
||||
description: 'Comma separated names of services to build(in small letters).'
|
||||
|
|
@ -20,12 +19,20 @@ jobs:
|
|||
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Rebase with main branch, to make sure the code has latest main changes
|
||||
if: github.ref != 'refs/heads/main'
|
||||
run: |
|
||||
git pull --rebase origin main
|
||||
git remote -v
|
||||
git config --global user.email "action@github.com"
|
||||
git config --global user.name "GitHub Action"
|
||||
git config --global rebase.autoStash true
|
||||
git fetch origin main:main
|
||||
git rebase main
|
||||
git log -3
|
||||
|
||||
- name: Downloading yq
|
||||
run: |
|
||||
|
|
@ -48,6 +55,8 @@ jobs:
|
|||
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}
|
||||
|
||||
- uses: depot/setup-action@v1
|
||||
env:
|
||||
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
|
||||
- name: Get HEAD Commit ID
|
||||
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
- name: Define Branch Name
|
||||
|
|
@ -65,78 +74,168 @@ jobs:
|
|||
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
|
||||
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
|
||||
MSAAS_REPO_FOLDER: /tmp/msaas
|
||||
SERVICES_INPUT: ${{ github.event.inputs.services }}
|
||||
run: |
|
||||
set -exo pipefail
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b $BRANCH_NAME
|
||||
working_dir=$(pwd)
|
||||
function image_version(){
|
||||
local service=$1
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
current_version=$(yq eval '.AppVersion' $chart_path)
|
||||
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
|
||||
echo $new_version
|
||||
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# Configuration
|
||||
readonly WORKING_DIR=$(pwd)
|
||||
readonly BUILD_SCRIPT_NAME="build.sh"
|
||||
readonly BACKEND_SERVICES_FILE="/tmp/backend.txt"
|
||||
|
||||
# Initialize git configuration
|
||||
setup_git() {
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
}
|
||||
function clone_msaas() {
|
||||
[ -d $MSAAS_REPO_FOLDER ] || {
|
||||
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
cd openreplay && git fetch origin && git checkout main # This have to be changed to specific tag
|
||||
git log -1
|
||||
cd $MSAAS_REPO_FOLDER
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
}
|
||||
|
||||
# Get and increment image version
|
||||
image_version() {
|
||||
local service=$1
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
local current_version new_version
|
||||
|
||||
current_version=$(yq eval '.AppVersion' "$chart_path")
|
||||
new_version=$(echo "$current_version" | awk -F. '{$NF += 1; print $1"."$2"."$3}')
|
||||
echo "$new_version"
|
||||
}
|
||||
function build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
echo building managed
|
||||
clone_msaas
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/api
|
||||
else
|
||||
cd $MSAAS_REPO_FOLDER/openreplay/$service
|
||||
fi
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
|
||||
|
||||
# Clone MSAAS repository if not exists
|
||||
clone_msaas() {
|
||||
if [[ ! -d "$MSAAS_REPO_FOLDER" ]]; then
|
||||
git clone -b dev --recursive "https://x-access-token:${MSAAS_REPO_CLONE_TOKEN}@${MSAAS_REPO_URL}" "$MSAAS_REPO_FOLDER"
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
cd openreplay && git fetch origin && git checkout main
|
||||
git log -1
|
||||
cd "$MSAAS_REPO_FOLDER"
|
||||
bash git-init.sh
|
||||
git checkout
|
||||
fi
|
||||
}
|
||||
# Checking for backend images
|
||||
ls backend/cmd >> /tmp/backend.txt
|
||||
echo Services: "${{ github.event.inputs.services }}"
|
||||
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
|
||||
BUILD_SCRIPT_NAME="build.sh"
|
||||
# Build FOSS
|
||||
for SERVICE in "${SERVICES[@]}"; do
|
||||
# Check if service is backend
|
||||
if grep -q $SERVICE /tmp/backend.txt; then
|
||||
cd backend
|
||||
foss_build_args="nil $SERVICE"
|
||||
ee_build_args="ee $SERVICE"
|
||||
else
|
||||
[[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
|
||||
[[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
version=$(image_version $SERVICE)
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
|
||||
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
|
||||
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
|
||||
else
|
||||
build_managed $SERVICE $version
|
||||
fi
|
||||
cd $working_dir
|
||||
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
|
||||
yq eval ".AppVersion = \"$version\"" -i $chart_path
|
||||
git add $chart_path
|
||||
git commit -m "Increment $SERVICE chart version"
|
||||
git push --set-upstream origin $BRANCH_NAME
|
||||
done
|
||||
|
||||
# Build managed services
|
||||
build_managed() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
|
||||
echo "Building managed service: $service"
|
||||
clone_msaas
|
||||
|
||||
if [[ $service == 'chalice' ]]; then
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/api"
|
||||
else
|
||||
cd "$MSAAS_REPO_FOLDER/openreplay/$service"
|
||||
fi
|
||||
|
||||
local build_cmd="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh"
|
||||
|
||||
echo "Executing: $build_cmd"
|
||||
if ! eval "$build_cmd" 2>&1; then
|
||||
echo "Build failed for $service"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Build service with given arguments
|
||||
build_service() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local build_args=$3
|
||||
local build_script=${4:-$BUILD_SCRIPT_NAME}
|
||||
|
||||
local command="IMAGE_TAG=$version DOCKER_RUNTIME=depot DOCKER_BUILD_ARGS=--push ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash $build_script $build_args"
|
||||
echo "Executing: $command"
|
||||
eval "$command"
|
||||
}
|
||||
|
||||
# Update chart version and commit changes
|
||||
update_chart_version() {
|
||||
local service=$1
|
||||
local version=$2
|
||||
local chart_path="$WORKING_DIR/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
|
||||
|
||||
# Ensure we're in the original working directory/repository
|
||||
cd "$WORKING_DIR"
|
||||
yq eval ".AppVersion = \"$version\"" -i "$chart_path"
|
||||
git add "$chart_path"
|
||||
git commit -m "Increment $service chart version to $version"
|
||||
git push --set-upstream origin "$BRANCH_NAME"
|
||||
cd -
|
||||
}
|
||||
|
||||
# Main execution
|
||||
main() {
|
||||
setup_git
|
||||
|
||||
# Get backend services list
|
||||
ls backend/cmd >"$BACKEND_SERVICES_FILE"
|
||||
|
||||
# Parse services input (fix for GitHub Actions syntax)
|
||||
echo "Services: ${SERVICES_INPUT:-$1}"
|
||||
IFS=',' read -ra services <<<"${SERVICES_INPUT:-$1}"
|
||||
|
||||
# Process each service
|
||||
for service in "${services[@]}"; do
|
||||
echo "Processing service: $service"
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
local foss_build_args="" ee_build_args="" build_script="$BUILD_SCRIPT_NAME"
|
||||
|
||||
# Determine build configuration based on service type
|
||||
if grep -q "$service" "$BACKEND_SERVICES_FILE"; then
|
||||
# Backend service
|
||||
cd backend
|
||||
foss_build_args="nil $service"
|
||||
ee_build_args="ee $service"
|
||||
else
|
||||
# Non-backend service
|
||||
case "$service" in
|
||||
chalice | alerts | crons)
|
||||
cd "$WORKING_DIR/api"
|
||||
;;
|
||||
*)
|
||||
cd "$service"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Special build scripts for alerts/crons
|
||||
if [[ $service == 'alerts' || $service == 'crons' ]]; then
|
||||
build_script="build_${service}.sh"
|
||||
fi
|
||||
|
||||
ee_build_args="ee"
|
||||
fi
|
||||
|
||||
# Get version and build
|
||||
local version
|
||||
version=$(image_version "$service")
|
||||
|
||||
# Build FOSS and EE versions
|
||||
build_service "$service" "$version" "$foss_build_args"
|
||||
build_service "$service" "${version}-ee" "$ee_build_args"
|
||||
|
||||
# Build managed version for specific services
|
||||
if [[ "$service" != "chalice" && "$service" != "frontend" ]]; then
|
||||
echo "Nothing to build in managed for service $service"
|
||||
else
|
||||
build_managed "$service" "$version"
|
||||
fi
|
||||
|
||||
# Update chart and commit
|
||||
update_chart_version "$service" "$version"
|
||||
done
|
||||
cd "$WORKING_DIR"
|
||||
|
||||
# Cleanup
|
||||
rm -f "$BACKEND_SERVICES_FILE"
|
||||
}
|
||||
|
||||
echo "Working directory: $WORKING_DIR"
|
||||
# Run main function with all arguments
|
||||
main "$SERVICES_INPUT"
|
||||
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: repo-sync/pull-request@v2
|
||||
|
|
@ -147,8 +246,7 @@ jobs:
|
|||
pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
|
||||
pr_body: |
|
||||
This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
|
||||
Once this PR is merged, To update the latest tag, run the following workflow.
|
||||
https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml
|
||||
Once this PR is merged, tag update job will run automatically.
|
||||
|
||||
# - name: Debug Job
|
||||
# if: ${{ failure() }}
|
||||
|
|
|
|||
47
.github/workflows/update-tag.yaml
vendored
47
.github/workflows/update-tag.yaml
vendored
|
|
@ -1,35 +1,42 @@
|
|||
on:
|
||||
workflow_dispatch:
|
||||
description: "This workflow will build for patches for latest tag, and will Always use commit from main branch."
|
||||
inputs:
|
||||
services:
|
||||
description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false"
|
||||
required: true
|
||||
default: "false"
|
||||
|
||||
name: Force Push tag with main branch HEAD
|
||||
pull_request:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
name: Release tag update --force
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Build Patch from main
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
|
||||
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
|
||||
if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Get latest release tag using GitHub API
|
||||
id: get-latest-tag
|
||||
run: |
|
||||
LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||
"https://api.github.com/repos/${{ github.repository }}/releases/latest" \
|
||||
| jq -r .tag_name)
|
||||
|
||||
# Fallback to git command if API doesn't return a tag
|
||||
if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then
|
||||
echo "Not found latest tag"
|
||||
exit 100
|
||||
fi
|
||||
|
||||
echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV
|
||||
echo "Latest tag: $LATEST_TAG"
|
||||
|
||||
- name: Set Remote with GITHUB_TOKEN
|
||||
run: |
|
||||
git config --unset http.https://github.com/.extraheader
|
||||
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
|
||||
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}
|
||||
|
||||
- name: Push main branch to tag
|
||||
run: |
|
||||
git fetch --tags
|
||||
git checkout main
|
||||
git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force
|
||||
# - name: Debug Job
|
||||
# if: ${{ failure() }}
|
||||
# uses: mxschmitt/action-tmate@v3
|
||||
# with:
|
||||
# limit-access-to-actor: true
|
||||
echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main"
|
||||
git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force
|
||||
|
|
|
|||
|
|
@ -85,7 +85,8 @@ def __generic_query(typename, value_length=None):
|
|||
ORDER BY value"""
|
||||
|
||||
if value_length is None or value_length > 2:
|
||||
return f"""(SELECT DISTINCT value, type
|
||||
return f"""SELECT DISTINCT ON(value,type) value, type
|
||||
((SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
|
|
@ -101,7 +102,7 @@ def __generic_query(typename, value_length=None):
|
|||
AND type='{typename.upper()}'
|
||||
AND value ILIKE %(value)s
|
||||
ORDER BY value
|
||||
LIMIT 5);"""
|
||||
LIMIT 5)) AS raw;"""
|
||||
return f"""SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
|
|
@ -326,7 +327,7 @@ def __search_metadata(project_id, value, key=None, source=None):
|
|||
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(f"""\
|
||||
SELECT key, value, 'METADATA' AS TYPE
|
||||
SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
|
||||
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
|
||||
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value)}))
|
||||
|
|
|
|||
|
|
@ -338,14 +338,14 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
|
|||
SELECT details.error_id as error_id,
|
||||
name, message, users, total,
|
||||
sessions, last_occurrence, first_occurrence, chart
|
||||
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
|
||||
FROM (SELECT error_id,
|
||||
JSONExtractString(toString(`$properties`), 'name') AS name,
|
||||
JSONExtractString(toString(`$properties`), 'message') AS message,
|
||||
COUNT(DISTINCT user_id) AS users,
|
||||
COUNT(DISTINCT events.session_id) AS sessions,
|
||||
MAX(created_at) AS max_datetime,
|
||||
MIN(created_at) AS min_datetime,
|
||||
COUNT(DISTINCT JSONExtractString(toString(`$properties`), 'error_id'))
|
||||
COUNT(DISTINCT error_id)
|
||||
OVER() AS total
|
||||
FROM {MAIN_EVENTS_TABLE} AS events
|
||||
INNER JOIN (SELECT session_id, coalesce(user_id,toString(user_uuid)) AS user_id
|
||||
|
|
@ -357,7 +357,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
|
|||
GROUP BY error_id, name, message
|
||||
ORDER BY {sort} {order}
|
||||
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
|
||||
INNER JOIN (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
|
||||
INNER JOIN (SELECT error_id,
|
||||
toUnixTimestamp(MAX(created_at))*1000 AS last_occurrence,
|
||||
toUnixTimestamp(MIN(created_at))*1000 AS first_occurrence
|
||||
FROM {MAIN_EVENTS_TABLE}
|
||||
|
|
@ -366,7 +366,7 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us
|
|||
GROUP BY error_id) AS time_details
|
||||
ON details.error_id=time_details.error_id
|
||||
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
|
||||
FROM (SELECT JSONExtractString(toString(`$properties`), 'error_id') AS error_id,
|
||||
FROM (SELECT error_id,
|
||||
gs.generate_series AS timestamp,
|
||||
COUNT(DISTINCT session_id) AS count
|
||||
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS gs
|
||||
|
|
|
|||
|
|
@ -50,8 +50,8 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT username, token, url
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id = %(user_id)s;""",
|
||||
{"user_id": self._user_id})
|
||||
)
|
||||
data = helper.dict_to_camel_case(cur.fetchone())
|
||||
|
|
@ -95,10 +95,9 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
def add(self, username, token, url, obfuscate=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
INSERT INTO public.jira_cloud(username, token, user_id,url)
|
||||
VALUES (%(username)s, %(token)s, %(user_id)s,%(url)s)
|
||||
RETURNING username, token, url;""",
|
||||
cur.mogrify(""" \
|
||||
INSERT INTO public.jira_cloud(username, token, user_id, url)
|
||||
VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s) RETURNING username, token, url;""",
|
||||
{"user_id": self._user_id, "username": username,
|
||||
"token": token, "url": url})
|
||||
)
|
||||
|
|
@ -112,9 +111,10 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
def delete(self):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
DELETE FROM public.jira_cloud
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
cur.mogrify(""" \
|
||||
DELETE
|
||||
FROM public.jira_cloud
|
||||
WHERE user_id = %(user_id)s;""",
|
||||
{"user_id": self._user_id})
|
||||
)
|
||||
return {"state": "success"}
|
||||
|
|
@ -125,7 +125,7 @@ class JIRAIntegration(base.BaseIntegration):
|
|||
changes={
|
||||
"username": data.username,
|
||||
"token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
|
||||
else self.integration.token,
|
||||
else self.integration["token"],
|
||||
"url": str(data.url)
|
||||
},
|
||||
obfuscate=True
|
||||
|
|
|
|||
|
|
@ -153,7 +153,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -178,7 +178,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -1108,8 +1108,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
is_any = sh.isAny_opreator(f.operator)
|
||||
if is_any or len(f.value) == 0:
|
||||
continue
|
||||
is_negative_operator = sh.is_negation_operator(f.operator)
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
op = sh.get_sql_operator(f.operator)
|
||||
r_op = ""
|
||||
if is_negative_operator:
|
||||
r_op = sh.reverse_sql_operator(op)
|
||||
e_k_f = e_k + f"_fetch{j}"
|
||||
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
|
||||
if f.type == schemas.FetchFilterType.FETCH_URL:
|
||||
|
|
@ -1118,6 +1122,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
apply = True
|
||||
if is_negative_operator:
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = sh.multi_conditions(
|
||||
f"sub.`$properties`.url_path {r_op} %({e_k_f})s", f.value, value_key=e_k_f)
|
||||
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
|
||||
event_where.append(json_condition(
|
||||
"main", "$properties", 'status', op, f.value, e_k_f, True, True
|
||||
|
|
@ -1130,6 +1140,13 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
apply = True
|
||||
if is_negative_operator:
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = sh.multi_conditions(
|
||||
f"sub.`$properties`.method {r_op} %({e_k_f})s", f.value,
|
||||
value_key=e_k_f)
|
||||
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
|
||||
event_where.append(
|
||||
sh.multi_conditions(f"main.`$duration_s` {f.operator} %({e_k_f})s/1000", f.value,
|
||||
|
|
@ -1142,12 +1159,26 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
apply = True
|
||||
if is_negative_operator:
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = sh.multi_conditions(
|
||||
f"sub.`$properties`.request_body {r_op} %({e_k_f})s", f.value,
|
||||
value_key=e_k_f)
|
||||
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
|
||||
event_where.append(json_condition(
|
||||
"main", "$properties", 'response_body', op, f.value, e_k_f
|
||||
))
|
||||
events_conditions[-1]["condition"].append(event_where[-1])
|
||||
apply = True
|
||||
if is_negative_operator:
|
||||
events_conditions_not.append(
|
||||
{
|
||||
"type": f"sub.`$event_name`='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
|
||||
events_conditions_not[-1]["condition"] = sh.multi_conditions(
|
||||
f"sub.`$properties`.response_body {r_op} %({e_k_f})s", f.value,
|
||||
value_key=e_k_f)
|
||||
else:
|
||||
logging.warning(f"undefined FETCH filter: {f.type}")
|
||||
if not apply:
|
||||
|
|
@ -1395,17 +1426,30 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
|
|||
if extra_conditions and len(extra_conditions) > 0:
|
||||
_extra_or_condition = []
|
||||
for i, c in enumerate(extra_conditions):
|
||||
if sh.isAny_opreator(c.operator):
|
||||
if sh.isAny_opreator(c.operator) and c.type != schemas.EventType.REQUEST_DETAILS.value:
|
||||
continue
|
||||
e_k = f"ec_value{i}"
|
||||
op = sh.get_sql_operator(c.operator)
|
||||
c.value = helper.values_for_operator(value=c.value, op=c.operator)
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(c.value, value_key=e_k)}
|
||||
if c.type == events.EventType.LOCATION.ui_type:
|
||||
if c.type in (schemas.EventType.LOCATION.value, schemas.EventType.REQUEST.value):
|
||||
_extra_or_condition.append(
|
||||
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
|
||||
c.value, value_key=e_k))
|
||||
elif c.type == schemas.EventType.REQUEST_DETAILS.value:
|
||||
for j, c_f in enumerate(c.filters):
|
||||
if sh.isAny_opreator(c_f.operator) or len(c_f.value) == 0:
|
||||
continue
|
||||
e_k += f"_{j}"
|
||||
op = sh.get_sql_operator(c_f.operator)
|
||||
c_f.value = helper.values_for_operator(value=c_f.value, op=c_f.operator)
|
||||
full_args = {**full_args,
|
||||
**sh.multi_values(c_f.value, value_key=e_k)}
|
||||
if c_f.type == schemas.FetchFilterType.FETCH_URL.value:
|
||||
_extra_or_condition.append(
|
||||
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
|
||||
c_f.value, value_key=e_k))
|
||||
else:
|
||||
logging.warning(f"unsupported extra_event type:${c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
|
|
|
|||
|
|
@ -148,7 +148,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -165,7 +165,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
|
|||
"isEvent": True,
|
||||
"value": [],
|
||||
"operator": e.operator,
|
||||
"filters": []
|
||||
"filters": e.filters
|
||||
})
|
||||
for v in e.value:
|
||||
if v not in extra_conditions[e.operator].value:
|
||||
|
|
@ -989,7 +989,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
|
|||
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
|
||||
c.value, value_key=e_k))
|
||||
else:
|
||||
logger.warning(f"unsupported extra_event type:${c.type}")
|
||||
logger.warning(f"unsupported extra_event type: {c.type}")
|
||||
if len(_extra_or_condition) > 0:
|
||||
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
|
||||
query_part = f"""\
|
||||
|
|
|
|||
|
|
@ -4,37 +4,41 @@ import schemas
|
|||
|
||||
|
||||
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator, schemas.MathOperator]):
|
||||
if isinstance(op, Enum):
|
||||
op = op.value
|
||||
return {
|
||||
schemas.SearchEventOperator.IS: "=",
|
||||
schemas.SearchEventOperator.ON: "=",
|
||||
schemas.SearchEventOperator.ON_ANY: "IN",
|
||||
schemas.SearchEventOperator.IS_NOT: "!=",
|
||||
schemas.SearchEventOperator.NOT_ON: "!=",
|
||||
schemas.SearchEventOperator.CONTAINS: "ILIKE",
|
||||
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
|
||||
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
|
||||
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
|
||||
schemas.SearchEventOperator.IS.value: "=",
|
||||
schemas.SearchEventOperator.ON.value: "=",
|
||||
schemas.SearchEventOperator.ON_ANY.value: "IN",
|
||||
schemas.SearchEventOperator.IS_NOT.value: "!=",
|
||||
schemas.SearchEventOperator.NOT_ON.value: "!=",
|
||||
schemas.SearchEventOperator.CONTAINS.value: "ILIKE",
|
||||
schemas.SearchEventOperator.NOT_CONTAINS.value: "NOT ILIKE",
|
||||
schemas.SearchEventOperator.STARTS_WITH.value: "ILIKE",
|
||||
schemas.SearchEventOperator.ENDS_WITH.value: "ILIKE",
|
||||
# Selector operators:
|
||||
schemas.ClickEventExtraOperator.IS: "=",
|
||||
schemas.ClickEventExtraOperator.IS_NOT: "!=",
|
||||
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
|
||||
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.IS.value: "=",
|
||||
schemas.ClickEventExtraOperator.IS_NOT.value: "!=",
|
||||
schemas.ClickEventExtraOperator.CONTAINS.value: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS.value: "NOT ILIKE",
|
||||
schemas.ClickEventExtraOperator.STARTS_WITH.value: "ILIKE",
|
||||
schemas.ClickEventExtraOperator.ENDS_WITH.value: "ILIKE",
|
||||
|
||||
schemas.MathOperator.GREATER: ">",
|
||||
schemas.MathOperator.GREATER_EQ: ">=",
|
||||
schemas.MathOperator.LESS: "<",
|
||||
schemas.MathOperator.LESS_EQ: "<=",
|
||||
schemas.MathOperator.GREATER.value: ">",
|
||||
schemas.MathOperator.GREATER_EQ.value: ">=",
|
||||
schemas.MathOperator.LESS.value: "<",
|
||||
schemas.MathOperator.LESS_EQ.value: "<=",
|
||||
}.get(op, "=")
|
||||
|
||||
|
||||
def is_negation_operator(op: schemas.SearchEventOperator):
|
||||
return op in [schemas.SearchEventOperator.IS_NOT,
|
||||
schemas.SearchEventOperator.NOT_ON,
|
||||
schemas.SearchEventOperator.NOT_CONTAINS,
|
||||
schemas.ClickEventExtraOperator.IS_NOT,
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS]
|
||||
if isinstance(op, Enum):
|
||||
op = op.value
|
||||
return op in [schemas.SearchEventOperator.IS_NOT.value,
|
||||
schemas.SearchEventOperator.NOT_ON.value,
|
||||
schemas.SearchEventOperator.NOT_CONTAINS.value,
|
||||
schemas.ClickEventExtraOperator.IS_NOT.value,
|
||||
schemas.ClickEventExtraOperator.NOT_CONTAINS.value]
|
||||
|
||||
|
||||
def reverse_sql_operator(op):
|
||||
|
|
|
|||
|
|
@ -960,36 +960,6 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
|
|||
|
||||
return self
|
||||
|
||||
# We don't need this as the UI is expecting filters to override the full series' filters
|
||||
# @model_validator(mode="after")
|
||||
# def __merge_out_filters_with_series(self):
|
||||
# for f in self.filters:
|
||||
# for s in self.series:
|
||||
# found = False
|
||||
#
|
||||
# if f.is_event:
|
||||
# sub = s.filter.events
|
||||
# else:
|
||||
# sub = s.filter.filters
|
||||
#
|
||||
# for e in sub:
|
||||
# if f.type == e.type and f.operator == e.operator:
|
||||
# found = True
|
||||
# if f.is_event:
|
||||
# # If extra event: append value
|
||||
# for v in f.value:
|
||||
# if v not in e.value:
|
||||
# e.value.append(v)
|
||||
# else:
|
||||
# # If extra filter: override value
|
||||
# e.value = f.value
|
||||
# if not found:
|
||||
# sub.append(f)
|
||||
#
|
||||
# self.filters = []
|
||||
#
|
||||
# return self
|
||||
|
||||
# UI is expecting filters to override the full series' filters
|
||||
@model_validator(mode="after")
|
||||
def __override_series_filters_with_outer_filters(self):
|
||||
|
|
@ -1060,6 +1030,16 @@ class CardTable(__CardSchema):
|
|||
values["metricValue"] = []
|
||||
return values
|
||||
|
||||
@model_validator(mode="after")
|
||||
def __enforce_AND_operator(self):
|
||||
self.metric_of = MetricOfTable(self.metric_of)
|
||||
if self.metric_of in (MetricOfTable.VISITED_URL, MetricOfTable.FETCH, \
|
||||
MetricOfTable.VISITED_URL.value, MetricOfTable.FETCH.value):
|
||||
for s in self.series:
|
||||
if s.filter is not None:
|
||||
s.filter.events_order = SearchEventOrder.AND
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def __transform(self):
|
||||
self.metric_of = MetricOfTable(self.metric_of)
|
||||
|
|
|
|||
|
|
@ -2,11 +2,12 @@ package datasaver
|
|||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"openreplay/backend/pkg/db/types"
|
||||
|
||||
"openreplay/backend/internal/config/db"
|
||||
"openreplay/backend/pkg/db/clickhouse"
|
||||
"openreplay/backend/pkg/db/postgres"
|
||||
"openreplay/backend/pkg/db/types"
|
||||
"openreplay/backend/pkg/logger"
|
||||
. "openreplay/backend/pkg/messages"
|
||||
queue "openreplay/backend/pkg/queue/types"
|
||||
|
|
@ -50,10 +51,6 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con
|
|||
}
|
||||
|
||||
func (s *saverImpl) Handle(msg Message) {
|
||||
if msg.TypeID() == MsgCustomEvent {
|
||||
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
|
||||
}
|
||||
|
||||
var (
|
||||
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
|
||||
session *sessions.Session
|
||||
|
|
@ -69,6 +66,23 @@ func (s *saverImpl) Handle(msg Message) {
|
|||
return
|
||||
}
|
||||
|
||||
if msg.TypeID() == MsgCustomEvent {
|
||||
m := msg.(*CustomEvent)
|
||||
// Try to parse custom event payload to JSON and extract or_payload field
|
||||
type CustomEventPayload struct {
|
||||
CustomTimestamp uint64 `json:"or_timestamp"`
|
||||
}
|
||||
customPayload := &CustomEventPayload{}
|
||||
if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil {
|
||||
if customPayload.CustomTimestamp >= session.Timestamp {
|
||||
s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp)
|
||||
msg.Meta().Timestamp = customPayload.CustomTimestamp
|
||||
s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp)
|
||||
}
|
||||
}
|
||||
defer s.Handle(types.WrapCustomEvent(m))
|
||||
}
|
||||
|
||||
if IsMobileType(msg.TypeID()) {
|
||||
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
|
||||
if !postgres.IsPkeyViolation(err) {
|
||||
|
|
|
|||
|
|
@ -86,7 +86,8 @@ def __generic_query(typename, value_length=None):
|
|||
ORDER BY value"""
|
||||
|
||||
if value_length is None or value_length > 2:
|
||||
return f"""(SELECT DISTINCT value, type
|
||||
return f"""SELECT DISTINCT ON(value, type) value, type
|
||||
FROM ((SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
|
|
@ -102,7 +103,7 @@ def __generic_query(typename, value_length=None):
|
|||
AND type='{typename.upper()}'
|
||||
AND value ILIKE %(value)s
|
||||
ORDER BY value
|
||||
LIMIT 5);"""
|
||||
LIMIT 5)) AS raw;"""
|
||||
return f"""SELECT DISTINCT value, type
|
||||
FROM {TABLE}
|
||||
WHERE
|
||||
|
|
@ -257,7 +258,7 @@ def __search_metadata(project_id, value, key=None, source=None):
|
|||
WHERE project_id = %(project_id)s
|
||||
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
|
||||
with ch_client.ClickHouseClient() as cur:
|
||||
query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE
|
||||
query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE
|
||||
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
|
||||
LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value)})
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
|
||||
|
||||
ch_basic_query = errors_helper.__get_basic_constraints_ch(time_constraint=False)
|
||||
ch_basic_query.append("toString(`$properties`.error_id) = %(error_id)s")
|
||||
ch_basic_query.append("error_id = %(error_id)s")
|
||||
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
data["startDate24"] = TimeUTC.now(-1)
|
||||
|
|
@ -95,7 +95,7 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
"error_id": error_id}
|
||||
|
||||
main_ch_query = f"""\
|
||||
WITH pre_processed AS (SELECT toString(`$properties`.error_id) AS error_id,
|
||||
WITH pre_processed AS (SELECT error_id,
|
||||
toString(`$properties`.name) AS name,
|
||||
toString(`$properties`.message) AS message,
|
||||
session_id,
|
||||
|
|
@ -183,7 +183,7 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
AND `$event_name` = 'ERROR'
|
||||
AND events.created_at >= toDateTime(timestamp / 1000)
|
||||
AND events.created_at < toDateTime((timestamp + %(step_size24)s) / 1000)
|
||||
AND toString(`$properties`.error_id) = %(error_id)s
|
||||
AND error_id = %(error_id)s
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details
|
||||
) AS chart_details24 ON TRUE
|
||||
|
|
@ -196,7 +196,7 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
AND `$event_name` = 'ERROR'
|
||||
AND events.created_at >= toDateTime(timestamp / 1000)
|
||||
AND events.created_at < toDateTime((timestamp + %(step_size30)s) / 1000)
|
||||
AND toString(`$properties`.error_id) = %(error_id)s
|
||||
AND error_id = %(error_id)s
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details
|
||||
) AS chart_details30 ON TRUE;"""
|
||||
|
|
|
|||
|
|
@ -1,3 +1,16 @@
|
|||
SELECT 1
|
||||
FROM (SELECT throwIf(platform = 'ios', 'IOS sessions found')
|
||||
FROM experimental.sessions) AS raw
|
||||
LIMIT 1;
|
||||
|
||||
SELECT 1
|
||||
FROM (SELECT throwIf(platform = 'android', 'Android sessions found')
|
||||
FROM experimental.sessions) AS raw
|
||||
LIMIT 1;
|
||||
|
||||
ALTER TABLE experimental.sessions
|
||||
MODIFY COLUMN platform Enum8('web'=1,'mobile'=2) DEFAULT 'web';
|
||||
|
||||
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0-ee';
|
||||
|
||||
SET allow_experimental_json_type = 1;
|
||||
|
|
@ -151,8 +164,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
ORDER BY (project_id, "$event_name", created_at, session_id)
|
||||
TTL _timestamp + INTERVAL 1 MONTH ,
|
||||
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
|
||||
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
|
||||
|
||||
-- The list of events that should not be ingested,
|
||||
-- according to a specific event_name and optional properties
|
||||
|
|
|
|||
|
|
@ -9,8 +9,7 @@ CREATE TABLE IF NOT EXISTS experimental.autocomplete
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, type, value)
|
||||
TTL _timestamp + INTERVAL 1 MONTH;
|
||||
ORDER BY (project_id, type, value);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.events
|
||||
(
|
||||
|
|
@ -87,8 +86,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, event_type, session_id, message_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, datetime, event_type, session_id, message_id);
|
||||
|
||||
|
||||
|
||||
|
|
@ -108,7 +106,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
|
|||
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
|
||||
user_city LowCardinality(String),
|
||||
user_state LowCardinality(String),
|
||||
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
|
||||
platform Enum8('web'=1,'mobile'=2) DEFAULT 'web',
|
||||
datetime DateTime,
|
||||
timezone LowCardinality(Nullable(String)),
|
||||
duration UInt32,
|
||||
|
|
@ -140,7 +138,6 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
|
|||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMMDD(datetime)
|
||||
ORDER BY (project_id, datetime, session_id)
|
||||
TTL datetime + INTERVAL 3 MONTH
|
||||
SETTINGS index_granularity = 512;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
|
||||
|
|
@ -152,8 +149,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_favorite_sessions
|
|||
sign Int8
|
||||
) ENGINE = CollapsingMergeTree(sign)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, user_id, session_id)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, user_id, session_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
|
||||
(
|
||||
|
|
@ -163,8 +159,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_sessions
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, user_id, session_id)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, user_id, session_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
|
||||
(
|
||||
|
|
@ -174,8 +169,7 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, user_id, error_id)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, user_id, error_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.issues
|
||||
(
|
||||
|
|
@ -188,8 +182,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(_timestamp)
|
||||
ORDER BY (project_id, issue_id, type)
|
||||
TTL _timestamp + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, issue_id, type);
|
||||
|
||||
|
||||
|
||||
|
|
@ -292,8 +285,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions_feature_flags
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, datetime, session_id, feature_flag_id, condition_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS experimental.ios_events
|
||||
(
|
||||
|
|
@ -329,8 +321,7 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
PARTITION BY toYYYYMM(datetime)
|
||||
ORDER BY (project_id, datetime, event_type, session_id, message_id)
|
||||
TTL datetime + INTERVAL 3 MONTH;
|
||||
ORDER BY (project_id, datetime, event_type, session_id, message_id);
|
||||
|
||||
|
||||
SET allow_experimental_json_type = 1;
|
||||
|
|
@ -484,8 +475,7 @@ CREATE TABLE IF NOT EXISTS product_analytics.events
|
|||
_timestamp DateTime DEFAULT now()
|
||||
) ENGINE = ReplacingMergeTree(_timestamp)
|
||||
ORDER BY (project_id, "$event_name", created_at, session_id)
|
||||
TTL _timestamp + INTERVAL 1 MONTH ,
|
||||
_deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
|
||||
TTL _deleted_at + INTERVAL 1 DAY DELETE WHERE _deleted_at != '1970-01-01 00:00:00';
|
||||
|
||||
-- The list of events that should not be ingested,
|
||||
-- according to a specific event_name and optional properties
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
|
||||
import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
|
||||
import React, { Suspense, lazy } from 'react';
|
||||
import { Redirect, Route, Switch } from 'react-router-dom';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
|
|
@ -10,7 +9,7 @@ import { Loader } from 'UI';
|
|||
|
||||
import APIClient from './api_client';
|
||||
import * as routes from './routes';
|
||||
import { debounce } from '@/utils';
|
||||
import { debounceCall } from '@/utils';
|
||||
|
||||
const components: any = {
|
||||
SessionPure: lazy(() => import('Components/Session/Session')),
|
||||
|
|
@ -88,7 +87,6 @@ const ASSIST_PATH = routes.assist();
|
|||
const LIVE_SESSION_PATH = routes.liveSession();
|
||||
const MULTIVIEW_PATH = routes.multiview();
|
||||
const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
|
||||
const ASSIST_STATS_PATH = routes.assistStats();
|
||||
|
||||
const USABILITY_TESTING_PATH = routes.usabilityTesting();
|
||||
const USABILITY_TESTING_EDIT_PATH = routes.usabilityTestingEdit();
|
||||
|
|
@ -99,7 +97,6 @@ const SPOT_PATH = routes.spot();
|
|||
const SCOPE_SETUP = routes.scopeSetup();
|
||||
|
||||
const HIGHLIGHTS_PATH = routes.highlights();
|
||||
let debounceSearch: any = () => {};
|
||||
|
||||
function PrivateRoutes() {
|
||||
const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
|
||||
|
|
@ -124,13 +121,9 @@ function PrivateRoutes() {
|
|||
}
|
||||
}, [siteId]);
|
||||
|
||||
React.useEffect(() => {
|
||||
debounceSearch = debounce(() => searchStore.fetchSessions(), 250);
|
||||
}, []);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!searchStore.urlParsed) return;
|
||||
debounceSearch();
|
||||
debounceCall(() => searchStore.fetchSessions(true), 250)()
|
||||
}, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]);
|
||||
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying';
|
|||
import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone';
|
||||
import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility';
|
||||
import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings';
|
||||
import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode';
|
||||
import DebugLog from './DebugLog';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
|
|
@ -35,6 +36,7 @@ function SessionsListingSettings() {
|
|||
<div className="flex flex-col gap-2">
|
||||
<MouseTrailSettings />
|
||||
<DebugLog />
|
||||
<VirtualModeSettings />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import CardSessionsByList from 'Components/Dashboard/Widgets/CardSessionsByList'
|
|||
import { useModal } from 'Components/ModalContext';
|
||||
import Widget from '@/mstore/types/widget';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { FilterKey } from 'Types/filter/filterType';
|
||||
|
||||
interface Props {
|
||||
metric?: any;
|
||||
|
|
@ -35,20 +36,20 @@ function SessionsBy(props: Props) {
|
|||
...filtersMap[metric.metricOf],
|
||||
value: [data.name],
|
||||
type: filtersMap[metric.metricOf].key,
|
||||
filters: filtersMap[metric.metricOf].filters?.map((f: any) => {
|
||||
const {
|
||||
key,
|
||||
operatorOptions,
|
||||
category,
|
||||
icon,
|
||||
label,
|
||||
options,
|
||||
...cleaned
|
||||
} = f;
|
||||
return { ...cleaned, type: f.key, value: [] };
|
||||
}),
|
||||
filters: [],
|
||||
};
|
||||
|
||||
if (metric.metricOf === FilterKey.FETCH) {
|
||||
baseFilter.filters = [
|
||||
{
|
||||
key: FilterKey.FETCH_URL,
|
||||
operator: 'is',
|
||||
value: [data.name],
|
||||
type: FilterKey.FETCH_URL,
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
const {
|
||||
key,
|
||||
operatorOptions,
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ function BottomButtons({
|
|||
<Button
|
||||
loading={loading}
|
||||
type="primary"
|
||||
htmlType="submit"
|
||||
disabled={loading || !instance.validate()}
|
||||
id="submit-button"
|
||||
>
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ function ClickMapRagePicker() {
|
|||
<Checkbox onChange={onToggle} label={t('Include rage clicks')} />
|
||||
|
||||
<Button size="small" onClick={refreshHeatmapSession}>
|
||||
{t('Get new session')}
|
||||
{t('Get new image')}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -64,6 +64,7 @@ function DashboardView(props: Props) {
|
|||
};
|
||||
|
||||
useEffect(() => {
|
||||
dashboardStore.resetPeriod();
|
||||
if (queryParams.has('modal')) {
|
||||
onAddWidgets();
|
||||
trimQuery();
|
||||
|
|
|
|||
|
|
@ -117,8 +117,6 @@ const ListView: React.FC<Props> = ({
|
|||
if (disableSelection) {
|
||||
const path = withSiteId(`/metrics/${metric.metricId}`, siteId);
|
||||
history.push(path);
|
||||
} else {
|
||||
toggleSelection?.(metric.metricId);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -181,9 +181,10 @@ function WidgetChart(props: Props) {
|
|||
}
|
||||
prevMetricRef.current = _metric;
|
||||
const timestmaps = drillDownPeriod.toTimestamps();
|
||||
const density = props.isPreview ? metric.density : dashboardStore.selectedDensity
|
||||
const payload = isSaved
|
||||
? { ...metricParams }
|
||||
: { ...params, ...timestmaps, ..._metric.toJson() };
|
||||
? { ...metricParams, density }
|
||||
: { ...params, ...timestmaps, ..._metric.toJson(), density };
|
||||
debounceRequest(
|
||||
_metric,
|
||||
payload,
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ function RangeGranularity({
|
|||
}
|
||||
|
||||
const PAST_24_HR_MS = 24 * 60 * 60 * 1000;
|
||||
function calculateGranularities(periodDurationMs: number) {
|
||||
export function calculateGranularities(periodDurationMs: number) {
|
||||
const granularities = [
|
||||
{ label: 'Hourly', durationMs: 60 * 60 * 1000 },
|
||||
{ label: 'Daily', durationMs: 24 * 60 * 60 * 1000 },
|
||||
|
|
|
|||
|
|
@ -1,376 +1,395 @@
|
|||
import React, { useEffect, useState } from 'react';
|
||||
import { NoContent, Loader, Pagination } from 'UI';
|
||||
import { Button, Tag, Tooltip, Dropdown, message } from 'antd';
|
||||
import { UndoOutlined, DownOutlined } from '@ant-design/icons';
|
||||
import React, {useEffect, useState} from 'react';
|
||||
import {NoContent, Loader, Pagination} from 'UI';
|
||||
import {Button, Tag, Tooltip, Dropdown, message} from 'antd';
|
||||
import {UndoOutlined, DownOutlined} from '@ant-design/icons';
|
||||
import cn from 'classnames';
|
||||
import { useStore } from 'App/mstore';
|
||||
import {useStore} from 'App/mstore';
|
||||
import SessionItem from 'Shared/SessionItem';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import { DateTime } from 'luxon';
|
||||
import { debounce, numberWithCommas } from 'App/utils';
|
||||
import {observer} from 'mobx-react-lite';
|
||||
import {DateTime} from 'luxon';
|
||||
import {debounce, numberWithCommas} from 'App/utils';
|
||||
import useIsMounted from 'App/hooks/useIsMounted';
|
||||
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
|
||||
import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG';
|
||||
import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card';
|
||||
import {useTranslation} from 'react-i18next';
|
||||
|
||||
interface Props {
|
||||
className?: string;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
function WidgetSessions(props: Props) {
|
||||
const { t } = useTranslation();
|
||||
const listRef = React.useRef<HTMLDivElement>(null);
|
||||
const { className = '' } = props;
|
||||
const [activeSeries, setActiveSeries] = useState('all');
|
||||
const [data, setData] = useState<any>([]);
|
||||
const isMounted = useIsMounted();
|
||||
const [loading, setLoading] = useState(false);
|
||||
// all filtering done through series now
|
||||
const filteredSessions = getListSessionsBySeries(data, 'all');
|
||||
const { dashboardStore, metricStore, sessionStore, customFieldStore } =
|
||||
useStore();
|
||||
const focusedSeries = metricStore.focusedSeriesName;
|
||||
const filter = dashboardStore.drillDownFilter;
|
||||
const widget = metricStore.instance;
|
||||
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
|
||||
'LLL dd, yyyy HH:mm',
|
||||
);
|
||||
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
|
||||
'LLL dd, yyyy HH:mm',
|
||||
);
|
||||
const [seriesOptions, setSeriesOptions] = useState([
|
||||
{ label: t('All'), value: 'all' },
|
||||
]);
|
||||
const hasFilters =
|
||||
filter.filters.length > 0 ||
|
||||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
|
||||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
|
||||
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
|
||||
const metaList = customFieldStore.list.map((i: any) => i.key);
|
||||
const {t} = useTranslation();
|
||||
const listRef = React.useRef<HTMLDivElement>(null);
|
||||
const {className = ''} = props;
|
||||
const [activeSeries, setActiveSeries] = useState('all');
|
||||
const [data, setData] = useState<any>([]);
|
||||
const isMounted = useIsMounted();
|
||||
const [loading, setLoading] = useState(false);
|
||||
// all filtering done through series now
|
||||
const filteredSessions = getListSessionsBySeries(data, 'all');
|
||||
const {dashboardStore, metricStore, sessionStore, customFieldStore} =
|
||||
useStore();
|
||||
const focusedSeries = metricStore.focusedSeriesName;
|
||||
const filter = dashboardStore.drillDownFilter;
|
||||
const widget = metricStore.instance;
|
||||
const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat(
|
||||
'LLL dd, yyyy HH:mm',
|
||||
);
|
||||
const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat(
|
||||
'LLL dd, yyyy HH:mm',
|
||||
);
|
||||
const [seriesOptions, setSeriesOptions] = useState([
|
||||
{label: t('All'), value: 'all'},
|
||||
]);
|
||||
const hasFilters =
|
||||
filter.filters.length > 0 ||
|
||||
filter.startTimestamp !== dashboardStore.drillDownPeriod.start ||
|
||||
filter.endTimestamp !== dashboardStore.drillDownPeriod.end;
|
||||
const filterText = filter.filters.length > 0 ? filter.filters[0].value : '';
|
||||
const metaList = customFieldStore.list.map((i: any) => i.key);
|
||||
|
||||
const seriesDropdownItems = seriesOptions.map((option) => ({
|
||||
key: option.value,
|
||||
label: (
|
||||
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
|
||||
),
|
||||
}));
|
||||
|
||||
useEffect(() => {
|
||||
if (!widget.series) return;
|
||||
const seriesOptions = widget.series.map((item: any) => ({
|
||||
label: item.name,
|
||||
value: item.seriesId ?? item.name,
|
||||
const seriesDropdownItems = seriesOptions.map((option) => ({
|
||||
key: option.value,
|
||||
label: (
|
||||
<div onClick={() => setActiveSeries(option.value)}>{option.label}</div>
|
||||
),
|
||||
}));
|
||||
setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]);
|
||||
}, [widget.series.length]);
|
||||
|
||||
const fetchSessions = (metricId: any, filter: any) => {
|
||||
if (!isMounted()) return;
|
||||
setLoading(true);
|
||||
delete filter.eventsOrderSupport;
|
||||
if (widget.metricType === FUNNEL) {
|
||||
if (filter.series[0].filter.filters.length === 0) {
|
||||
setLoading(false);
|
||||
return setData([]);
|
||||
}
|
||||
}
|
||||
useEffect(() => {
|
||||
if (!widget.series) return;
|
||||
const seriesOptions = widget.series.map((item: any) => ({
|
||||
label: item.name,
|
||||
value: item.seriesId ?? item.name,
|
||||
}));
|
||||
setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]);
|
||||
}, [widget.series.length]);
|
||||
|
||||
widget
|
||||
.fetchSessions(metricId, filter)
|
||||
.then((res: any) => {
|
||||
setData(res);
|
||||
if (metricStore.drillDown) {
|
||||
setTimeout(() => {
|
||||
message.info(t('Sessions Refreshed!'));
|
||||
listRef.current?.scrollIntoView({ behavior: 'smooth' });
|
||||
metricStore.setDrillDown(false);
|
||||
}, 0);
|
||||
const fetchSessions = (metricId: any, filter: any) => {
|
||||
if (!isMounted()) return;
|
||||
|
||||
if (widget.metricType === FUNNEL) {
|
||||
if (filter.series[0].filter.filters.length === 0) {
|
||||
setLoading(false);
|
||||
return setData([]);
|
||||
}
|
||||
}
|
||||
})
|
||||
.finally(() => {
|
||||
setLoading(false);
|
||||
});
|
||||
};
|
||||
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
|
||||
sessionStore.getSessions(customFilters).then((data) => {
|
||||
setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]);
|
||||
});
|
||||
};
|
||||
const debounceRequest: any = React.useCallback(
|
||||
debounce(fetchSessions, 1000),
|
||||
[],
|
||||
);
|
||||
const debounceClickMapSearch = React.useCallback(
|
||||
debounce(fetchClickmapSessions, 1000),
|
||||
[],
|
||||
);
|
||||
|
||||
const depsString = JSON.stringify(widget.series);
|
||||
|
||||
const loadData = () => {
|
||||
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
|
||||
const clickFilter = {
|
||||
value: [metricStore.clickMapSearch],
|
||||
type: 'CLICK',
|
||||
operator: 'onSelector',
|
||||
isEvent: true,
|
||||
// @ts-ignore
|
||||
filters: [],
|
||||
};
|
||||
const timeRange = {
|
||||
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
|
||||
startDate: dashboardStore.drillDownPeriod.start,
|
||||
endDate: dashboardStore.drillDownPeriod.end,
|
||||
};
|
||||
const customFilter = {
|
||||
...filter,
|
||||
...timeRange,
|
||||
filters: [...sessionStore.userFilter.filters, clickFilter],
|
||||
};
|
||||
debounceClickMapSearch(customFilter);
|
||||
} else {
|
||||
const hasStartPoint =
|
||||
!!widget.startPoint && widget.metricType === USER_PATH;
|
||||
const onlyFocused = focusedSeries
|
||||
? widget.series.filter((s) => s.name === focusedSeries)
|
||||
: widget.series;
|
||||
const activeSeries = metricStore.disabledSeries.length
|
||||
? onlyFocused.filter(
|
||||
(s) => !metricStore.disabledSeries.includes(s.name),
|
||||
)
|
||||
: onlyFocused;
|
||||
const seriesJson = activeSeries.map((s) => s.toJson());
|
||||
if (hasStartPoint) {
|
||||
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
|
||||
}
|
||||
if (widget.metricType === USER_PATH) {
|
||||
if (
|
||||
seriesJson[0].filter.filters[0].value[0] === '' &&
|
||||
widget.data.nodes
|
||||
) {
|
||||
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
|
||||
} else if (
|
||||
seriesJson[0].filter.filters[0].value[0] === '' &&
|
||||
!widget.data.nodes?.length
|
||||
) {
|
||||
// no point requesting if we don't have starting point picked by api
|
||||
return;
|
||||
setLoading(true);
|
||||
const filterCopy = {...filter};
|
||||
delete filterCopy.eventsOrderSupport;
|
||||
|
||||
try {
|
||||
// Handle filters properly with null checks
|
||||
if (filterCopy.filters && filterCopy.filters.length > 0) {
|
||||
// Ensure the nested path exists before pushing
|
||||
if (filterCopy.series?.[0]?.filter) {
|
||||
if (!filterCopy.series[0].filter.filters) {
|
||||
filterCopy.series[0].filter.filters = [];
|
||||
}
|
||||
filterCopy.series[0].filter.filters.push(...filterCopy.filters);
|
||||
}
|
||||
filterCopy.filters = [];
|
||||
}
|
||||
} catch (e) {
|
||||
// do nothing
|
||||
}
|
||||
}
|
||||
debounceRequest(widget.metricId, {
|
||||
...filter,
|
||||
series: seriesJson,
|
||||
page: metricStore.sessionsPage,
|
||||
limit: metricStore.sessionsPageSize,
|
||||
});
|
||||
}
|
||||
};
|
||||
useEffect(() => {
|
||||
metricStore.updateKey('sessionsPage', 1);
|
||||
loadData();
|
||||
}, [
|
||||
filter.startTimestamp,
|
||||
filter.endTimestamp,
|
||||
filter.filters,
|
||||
depsString,
|
||||
metricStore.clickMapSearch,
|
||||
focusedSeries,
|
||||
widget.startPoint,
|
||||
widget.data.nodes,
|
||||
metricStore.disabledSeries.length,
|
||||
]);
|
||||
useEffect(loadData, [metricStore.sessionsPage]);
|
||||
useEffect(() => {
|
||||
if (activeSeries === 'all') {
|
||||
metricStore.setFocusedSeriesName(null);
|
||||
} else {
|
||||
metricStore.setFocusedSeriesName(
|
||||
seriesOptions.find((option) => option.value === activeSeries)?.label,
|
||||
false,
|
||||
);
|
||||
}
|
||||
}, [activeSeries]);
|
||||
useEffect(() => {
|
||||
if (focusedSeries) {
|
||||
setActiveSeries(
|
||||
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
|
||||
'all',
|
||||
);
|
||||
} else {
|
||||
setActiveSeries('all');
|
||||
}
|
||||
}, [focusedSeries]);
|
||||
widget
|
||||
.fetchSessions(metricId, filterCopy)
|
||||
.then((res: any) => {
|
||||
setData(res);
|
||||
if (metricStore.drillDown) {
|
||||
setTimeout(() => {
|
||||
message.info(t('Sessions Refreshed!'));
|
||||
listRef.current?.scrollIntoView({behavior: 'smooth'});
|
||||
metricStore.setDrillDown(false);
|
||||
}, 0);
|
||||
}
|
||||
})
|
||||
.finally(() => {
|
||||
setLoading(false);
|
||||
});
|
||||
};
|
||||
const fetchClickmapSessions = (customFilters: Record<string, any>) => {
|
||||
sessionStore.getSessions(customFilters).then((data) => {
|
||||
setData([{...data, seriesId: 1, seriesName: 'Clicks'}]);
|
||||
});
|
||||
};
|
||||
const debounceRequest: any = React.useCallback(
|
||||
debounce(fetchSessions, 1000),
|
||||
[],
|
||||
);
|
||||
const debounceClickMapSearch = React.useCallback(
|
||||
debounce(fetchClickmapSessions, 1000),
|
||||
[],
|
||||
);
|
||||
|
||||
const clearFilters = () => {
|
||||
metricStore.updateKey('sessionsPage', 1);
|
||||
dashboardStore.resetDrillDownFilter();
|
||||
};
|
||||
const depsString = JSON.stringify(widget.series);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
className,
|
||||
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
|
||||
)}
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="flex items-baseline gap-2">
|
||||
<h2 className="text-xl">
|
||||
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
|
||||
</h2>
|
||||
<div className="ml-2 color-gray-medium">
|
||||
{metricStore.clickMapLabel
|
||||
? `on "${metricStore.clickMapLabel}" `
|
||||
: null}
|
||||
{t('between')}{' '}
|
||||
<span className="font-medium color-gray-darkest">
|
||||
const loadData = () => {
|
||||
if (widget.metricType === HEATMAP && metricStore.clickMapSearch) {
|
||||
const clickFilter = {
|
||||
value: [metricStore.clickMapSearch],
|
||||
type: 'CLICK',
|
||||
operator: 'onSelector',
|
||||
isEvent: true,
|
||||
// @ts-ignore
|
||||
filters: [],
|
||||
};
|
||||
const timeRange = {
|
||||
rangeValue: dashboardStore.drillDownPeriod.rangeValue,
|
||||
startDate: dashboardStore.drillDownPeriod.start,
|
||||
endDate: dashboardStore.drillDownPeriod.end,
|
||||
};
|
||||
const customFilter = {
|
||||
...filter,
|
||||
...timeRange,
|
||||
filters: [...sessionStore.userFilter.filters, clickFilter],
|
||||
};
|
||||
debounceClickMapSearch(customFilter);
|
||||
} else {
|
||||
const hasStartPoint =
|
||||
!!widget.startPoint && widget.metricType === USER_PATH;
|
||||
const onlyFocused = focusedSeries
|
||||
? widget.series.filter((s) => s.name === focusedSeries)
|
||||
: widget.series;
|
||||
const activeSeries = metricStore.disabledSeries.length
|
||||
? onlyFocused.filter(
|
||||
(s) => !metricStore.disabledSeries.includes(s.name),
|
||||
)
|
||||
: onlyFocused;
|
||||
const seriesJson = activeSeries.map((s) => s.toJson());
|
||||
if (hasStartPoint) {
|
||||
seriesJson[0].filter.filters.push(widget.startPoint.toJson());
|
||||
}
|
||||
if (widget.metricType === USER_PATH) {
|
||||
if (
|
||||
seriesJson[0].filter.filters[0].value[0] === '' &&
|
||||
widget.data.nodes?.length
|
||||
) {
|
||||
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
|
||||
} else if (
|
||||
seriesJson[0].filter.filters[0].value[0] === '' &&
|
||||
!widget.data.nodes?.length
|
||||
) {
|
||||
// no point requesting if we don't have starting point picked by api
|
||||
return;
|
||||
}
|
||||
}
|
||||
debounceRequest(widget.metricId, {
|
||||
...filter,
|
||||
series: seriesJson,
|
||||
page: metricStore.sessionsPage,
|
||||
limit: metricStore.sessionsPageSize,
|
||||
});
|
||||
}
|
||||
};
|
||||
useEffect(() => {
|
||||
metricStore.updateKey('sessionsPage', 1);
|
||||
loadData();
|
||||
}, [
|
||||
filter.startTimestamp,
|
||||
filter.endTimestamp,
|
||||
filter.filters,
|
||||
depsString,
|
||||
metricStore.clickMapSearch,
|
||||
focusedSeries,
|
||||
widget.startPoint,
|
||||
widget.data.nodes,
|
||||
metricStore.disabledSeries.length,
|
||||
]);
|
||||
useEffect(loadData, [metricStore.sessionsPage]);
|
||||
useEffect(() => {
|
||||
if (activeSeries === 'all') {
|
||||
metricStore.setFocusedSeriesName(null);
|
||||
} else {
|
||||
metricStore.setFocusedSeriesName(
|
||||
seriesOptions.find((option) => option.value === activeSeries)?.label,
|
||||
false,
|
||||
);
|
||||
}
|
||||
}, [activeSeries]);
|
||||
useEffect(() => {
|
||||
if (focusedSeries) {
|
||||
setActiveSeries(
|
||||
seriesOptions.find((option) => option.label === focusedSeries)?.value ||
|
||||
'all',
|
||||
);
|
||||
} else {
|
||||
setActiveSeries('all');
|
||||
}
|
||||
}, [focusedSeries]);
|
||||
|
||||
const clearFilters = () => {
|
||||
metricStore.updateKey('sessionsPage', 1);
|
||||
dashboardStore.resetDrillDownFilter();
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
className,
|
||||
'bg-white p-3 pb-0 rounded-xl shadow-sm border mt-3',
|
||||
)}
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="flex items-baseline gap-2">
|
||||
<h2 className="text-xl">
|
||||
{metricStore.clickMapSearch ? t('Clicks') : t('Sessions')}
|
||||
</h2>
|
||||
<div className="ml-2 color-gray-medium">
|
||||
{metricStore.clickMapLabel
|
||||
? `on "${metricStore.clickMapLabel}" `
|
||||
: null}
|
||||
{t('between')}{' '}
|
||||
<span className="font-medium color-gray-darkest">
|
||||
{startTime}
|
||||
</span>{' '}
|
||||
{t('and')}{' '}
|
||||
<span className="font-medium color-gray-darkest">
|
||||
{t('and')}{' '}
|
||||
<span className="font-medium color-gray-darkest">
|
||||
{endTime}
|
||||
</span>{' '}
|
||||
</div>
|
||||
{hasFilters && (
|
||||
<Tooltip title={t('Clear Drilldown')} placement="top">
|
||||
<Button type="text" size="small" onClick={clearFilters}>
|
||||
<UndoOutlined />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{hasFilters && (
|
||||
<Tooltip title={t('Clear Drilldown')} placement="top">
|
||||
<Button type="text" size="small" onClick={clearFilters}>
|
||||
<UndoOutlined/>
|
||||
</Button>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{hasFilters && widget.metricType === 'table' && (
|
||||
<div className="py-2">
|
||||
<Tag
|
||||
closable
|
||||
onClose={clearFilters}
|
||||
className="truncate max-w-44 rounded-lg"
|
||||
>
|
||||
{filterText}
|
||||
</Tag>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{hasFilters && widget.metricType === 'table' && (
|
||||
<div className="py-2">
|
||||
<Tag
|
||||
closable
|
||||
onClose={clearFilters}
|
||||
className="truncate max-w-44 rounded-lg"
|
||||
>
|
||||
{filterText}
|
||||
</Tag>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-4">
|
||||
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
|
||||
<div className="flex items-center ml-6">
|
||||
<div className="flex items-center gap-4">
|
||||
{widget.metricType !== 'table' && widget.metricType !== HEATMAP && (
|
||||
<div className="flex items-center ml-6">
|
||||
<span className="mr-2 color-gray-medium">
|
||||
{t('Filter by Series')}
|
||||
</span>
|
||||
<Dropdown
|
||||
menu={{
|
||||
items: seriesDropdownItems,
|
||||
selectable: true,
|
||||
selectedKeys: [activeSeries],
|
||||
}}
|
||||
trigger={['click']}
|
||||
>
|
||||
<Button type="text" size="small">
|
||||
{seriesOptions.find((option) => option.value === activeSeries)
|
||||
?.label || t('Select Series')}
|
||||
<DownOutlined />
|
||||
</Button>
|
||||
</Dropdown>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-3">
|
||||
<Loader loading={loading}>
|
||||
<NoContent
|
||||
title={
|
||||
<div className="flex items-center justify-center flex-col">
|
||||
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60} />
|
||||
<div className="mt-4" />
|
||||
<div className="text-center">
|
||||
{t('No relevant sessions found for the selected time period')}
|
||||
<Dropdown
|
||||
menu={{
|
||||
items: seriesDropdownItems,
|
||||
selectable: true,
|
||||
selectedKeys: [activeSeries],
|
||||
}}
|
||||
trigger={['click']}
|
||||
>
|
||||
<Button type="text" size="small">
|
||||
{seriesOptions.find((option) => option.value === activeSeries)
|
||||
?.label || t('Select Series')}
|
||||
<DownOutlined/>
|
||||
</Button>
|
||||
</Dropdown>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
show={filteredSessions.sessions.length === 0}
|
||||
>
|
||||
{filteredSessions.sessions.map((session: any) => (
|
||||
<React.Fragment key={session.sessionId}>
|
||||
<SessionItem
|
||||
disableUser
|
||||
session={session}
|
||||
metaList={metaList}
|
||||
/>
|
||||
<div className="border-b" />
|
||||
</React.Fragment>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="flex items-center justify-between p-5"
|
||||
ref={listRef}
|
||||
>
|
||||
<div>
|
||||
{t('Showing')}{' '}
|
||||
<span className="font-medium">
|
||||
<div className="mt-3">
|
||||
<Loader loading={loading}>
|
||||
<NoContent
|
||||
title={
|
||||
<div className="flex items-center justify-center flex-col">
|
||||
<AnimatedSVG name={ICONS.NO_SESSIONS} size={60}/>
|
||||
<div className="mt-4"/>
|
||||
<div className="text-center">
|
||||
{t('No relevant sessions found for the selected time period')}
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
show={filteredSessions.sessions.length === 0}
|
||||
>
|
||||
{filteredSessions.sessions.map((session: any) => (
|
||||
<React.Fragment key={session.sessionId}>
|
||||
<SessionItem
|
||||
disableUser
|
||||
session={session}
|
||||
metaList={metaList}
|
||||
/>
|
||||
<div className="border-b"/>
|
||||
</React.Fragment>
|
||||
))}
|
||||
|
||||
<div
|
||||
className="flex items-center justify-between p-5"
|
||||
ref={listRef}
|
||||
>
|
||||
<div>
|
||||
{t('Showing')}{' '}
|
||||
<span className="font-medium">
|
||||
{(metricStore.sessionsPage - 1) *
|
||||
metricStore.sessionsPageSize +
|
||||
1}
|
||||
metricStore.sessionsPageSize +
|
||||
1}
|
||||
</span>{' '}
|
||||
{t('to')}{' '}
|
||||
<span className="font-medium">
|
||||
{t('to')}{' '}
|
||||
<span className="font-medium">
|
||||
{(metricStore.sessionsPage - 1) *
|
||||
metricStore.sessionsPageSize +
|
||||
filteredSessions.sessions.length}
|
||||
metricStore.sessionsPageSize +
|
||||
filteredSessions.sessions.length}
|
||||
</span>{' '}
|
||||
{t('of')}{' '}
|
||||
<span className="font-medium">
|
||||
{t('of')}{' '}
|
||||
<span className="font-medium">
|
||||
{numberWithCommas(filteredSessions.total)}
|
||||
</span>{' '}
|
||||
{t('sessions.')}
|
||||
</div>
|
||||
<Pagination
|
||||
page={metricStore.sessionsPage}
|
||||
total={filteredSessions.total}
|
||||
onPageChange={(page: any) =>
|
||||
metricStore.updateKey('sessionsPage', page)
|
||||
}
|
||||
limit={metricStore.sessionsPageSize}
|
||||
debounceRequest={500}
|
||||
/>
|
||||
{t('sessions.')}
|
||||
</div>
|
||||
<Pagination
|
||||
page={metricStore.sessionsPage}
|
||||
total={filteredSessions.total}
|
||||
onPageChange={(page: any) =>
|
||||
metricStore.updateKey('sessionsPage', page)
|
||||
}
|
||||
limit={metricStore.sessionsPageSize}
|
||||
debounceRequest={500}
|
||||
/>
|
||||
</div>
|
||||
</NoContent>
|
||||
</Loader>
|
||||
</div>
|
||||
</NoContent>
|
||||
</Loader>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const getListSessionsBySeries = (data: any, seriesId: any) => {
|
||||
const arr = data.reduce(
|
||||
(arr: any, element: any) => {
|
||||
if (seriesId === 'all') {
|
||||
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
|
||||
const sessions = element.sessions.filter(
|
||||
(i: any) => !sessionIds.includes(i.sessionId),
|
||||
);
|
||||
arr.sessions.push(...sessions);
|
||||
} else if (element.seriesId === seriesId) {
|
||||
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
|
||||
const sessions = element.sessions.filter(
|
||||
(i: any) => !sessionIds.includes(i.sessionId),
|
||||
);
|
||||
const duplicates = element.sessions.length - sessions.length;
|
||||
arr.sessions.push(...sessions);
|
||||
arr.total = element.total - duplicates;
|
||||
}
|
||||
return arr;
|
||||
},
|
||||
{ sessions: [] },
|
||||
);
|
||||
arr.total =
|
||||
seriesId === 'all'
|
||||
? Math.max(...data.map((i: any) => i.total))
|
||||
: data.find((i: any) => i.seriesId === seriesId).total;
|
||||
return arr;
|
||||
const arr = data.reduce(
|
||||
(arr: any, element: any) => {
|
||||
if (seriesId === 'all') {
|
||||
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
|
||||
const sessions = element.sessions.filter(
|
||||
(i: any) => !sessionIds.includes(i.sessionId),
|
||||
);
|
||||
arr.sessions.push(...sessions);
|
||||
} else if (element.seriesId === seriesId) {
|
||||
const sessionIds = arr.sessions.map((i: any) => i.sessionId);
|
||||
const sessions = element.sessions.filter(
|
||||
(i: any) => !sessionIds.includes(i.sessionId),
|
||||
);
|
||||
const duplicates = element.sessions.length - sessions.length;
|
||||
arr.sessions.push(...sessions);
|
||||
arr.total = element.total - duplicates;
|
||||
}
|
||||
return arr;
|
||||
},
|
||||
{sessions: []},
|
||||
);
|
||||
arr.total =
|
||||
seriesId === 'all'
|
||||
? Math.max(...data.map((i: any) => i.total))
|
||||
: data.find((i: any) => i.seriesId === seriesId).total;
|
||||
return arr;
|
||||
};
|
||||
|
||||
export default observer(WidgetSessions);
|
||||
|
|
|
|||
|
|
@ -92,6 +92,9 @@ function WidgetView({
|
|||
filter: { filters: selectedCard.filters },
|
||||
}),
|
||||
];
|
||||
} else if (selectedCard.cardType === TABLE) {
|
||||
cardData.series = [new FilterSeries()];
|
||||
cardData.series[0].filter.eventsOrder = 'and';
|
||||
}
|
||||
if (selectedCard.cardType === FUNNEL) {
|
||||
cardData.series = [new FilterSeries()];
|
||||
|
|
|
|||
|
|
@ -83,6 +83,7 @@ function WidgetWrapperNew(props: Props & RouteComponentProps) {
|
|||
});
|
||||
|
||||
const onChartClick = () => {
|
||||
dashboardStore.setDrillDownPeriod(dashboardStore.period);
|
||||
// if (!isWidget || isPredefined) return;
|
||||
props.history.push(
|
||||
withSiteId(
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import {
|
|||
LikeFilled,
|
||||
LikeOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import { Tour, TourProps } from './.store/antd-virtual-7db13b4af6/package';
|
||||
import { Tour, TourProps } from 'antd';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
interface Props {
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ function DropdownAudioPlayer({
|
|||
return {
|
||||
url: data.url,
|
||||
timestamp: data.timestamp,
|
||||
start: startTs,
|
||||
start: Math.max(0, startTs),
|
||||
};
|
||||
}),
|
||||
[audioEvents.length, sessionStart],
|
||||
|
|
|
|||
|
|
@ -114,13 +114,11 @@ function PlayerBlockHeader(props: any) {
|
|||
)}
|
||||
|
||||
{_metaList.length > 0 && (
|
||||
<div className="h-full flex items-center px-2 gap-1">
|
||||
<SessionMetaList
|
||||
className=""
|
||||
metaList={_metaList}
|
||||
maxLength={2}
|
||||
/>
|
||||
</div>
|
||||
<SessionMetaList
|
||||
horizontal
|
||||
metaList={_metaList}
|
||||
maxLength={2}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -38,8 +38,8 @@ function WebPlayer(props: any) {
|
|||
uxtestingStore,
|
||||
uiPlayerStore,
|
||||
integrationsStore,
|
||||
userStore,
|
||||
} = useStore();
|
||||
const devTools = sessionStore.devTools
|
||||
const session = sessionStore.current;
|
||||
const { prefetched } = sessionStore;
|
||||
const startedAt = sessionStore.current.startedAt || 0;
|
||||
|
|
@ -57,14 +57,17 @@ function WebPlayer(props: any) {
|
|||
const [fullView, setFullView] = useState(false);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (windowActive) {
|
||||
const handleActivation = () => {
|
||||
if (!document.hidden) {
|
||||
setWindowActive(true);
|
||||
document.removeEventListener('visibilitychange', handleActivation);
|
||||
}
|
||||
};
|
||||
document.addEventListener('visibilitychange', handleActivation);
|
||||
const handleActivation = () => {
|
||||
if (!document.hidden) {
|
||||
setWindowActive(true);
|
||||
document.removeEventListener('visibilitychange', handleActivation);
|
||||
}
|
||||
};
|
||||
document.addEventListener('visibilitychange', handleActivation);
|
||||
|
||||
return () => {
|
||||
devTools.update('network', { activeTab: 'ALL' });
|
||||
document.removeEventListener('visibilitychange', handleActivation);
|
||||
}
|
||||
}, []);
|
||||
|
||||
|
|
|
|||
|
|
@ -169,6 +169,6 @@ function TabChange({ from, to, activeUrl, onClick }) {
|
|||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export default observer(EventGroupWrapper);
|
||||
|
|
@ -4,17 +4,17 @@ import cn from 'classnames';
|
|||
import { observer } from 'mobx-react-lite';
|
||||
import React from 'react';
|
||||
import { VList, VListHandle } from 'virtua';
|
||||
import { Button } from 'antd'
|
||||
import { Button } from 'antd';
|
||||
import { PlayerContext } from 'App/components/Session/playerContext';
|
||||
import { useStore } from 'App/mstore';
|
||||
import { Icon } from 'UI';
|
||||
import { Search } from 'lucide-react'
|
||||
import { Search } from 'lucide-react';
|
||||
import EventGroupWrapper from './EventGroupWrapper';
|
||||
import EventSearch from './EventSearch/EventSearch';
|
||||
import styles from './eventsBlock.module.css';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { CloseOutlined } from ".store/@ant-design-icons-virtual-42686020c5/package";
|
||||
import { Tooltip } from ".store/antd-virtual-9dbfadb7f6/package";
|
||||
import { CloseOutlined } from "@ant-design/icons";
|
||||
import { Tooltip } from "antd";
|
||||
import { getDefaultFramework, frameworkIcons } from "../UnitStepsModal";
|
||||
|
||||
interface IProps {
|
||||
|
|
@ -25,7 +25,7 @@ const MODES = {
|
|||
SELECT: 'select',
|
||||
SEARCH: 'search',
|
||||
EXPORT: 'export',
|
||||
}
|
||||
};
|
||||
|
||||
function EventsBlock(props: IProps) {
|
||||
const defaultFramework = getDefaultFramework();
|
||||
|
|
@ -95,7 +95,7 @@ function EventsBlock(props: IProps) {
|
|||
? e.time >= zoomStartTs && e.time <= zoomEndTs
|
||||
: false
|
||||
: true,
|
||||
);
|
||||
);
|
||||
}, [
|
||||
filteredLength,
|
||||
notesWithEvtsLength,
|
||||
|
|
@ -126,6 +126,7 @@ function EventsBlock(props: IProps) {
|
|||
},
|
||||
[usedEvents, time, endTime],
|
||||
);
|
||||
|
||||
const currentTimeEventIndex = findLastFitting(time);
|
||||
|
||||
const write = ({
|
||||
|
|
@ -182,6 +183,7 @@ function EventsBlock(props: IProps) {
|
|||
const isTabChange = 'type' in event && event.type === 'TABCHANGE';
|
||||
const isCurrent = index === currentTimeEventIndex;
|
||||
const isPrev = index < currentTimeEventIndex;
|
||||
|
||||
return (
|
||||
<EventGroupWrapper
|
||||
query={query}
|
||||
|
|
@ -249,12 +251,14 @@ function EventsBlock(props: IProps) {
|
|||
onClick={() => setMode(MODES.SEARCH)}
|
||||
>
|
||||
<Search size={14} />
|
||||
<div>{t('Search')} {usedEvents.length} {t('events')}</div>
|
||||
<div>
|
||||
{t('Search')} {usedEvents.length} {t('events')}
|
||||
</div>
|
||||
</Button>
|
||||
<Tooltip title={t('Close Panel')} placement='bottom' >
|
||||
<Tooltip title={t('Close Panel')} placement="bottom">
|
||||
<Button
|
||||
className="ml-auto"
|
||||
type='text'
|
||||
type="text"
|
||||
onClick={() => {
|
||||
setActiveTab('');
|
||||
}}
|
||||
|
|
@ -263,19 +267,23 @@ function EventsBlock(props: IProps) {
|
|||
</Tooltip>
|
||||
</div>
|
||||
) : null}
|
||||
{mode === MODES.SEARCH ?
|
||||
{mode === MODES.SEARCH ? (
|
||||
<div className={'flex items-center gap-2'}>
|
||||
<EventSearch
|
||||
onChange={write}
|
||||
setActiveTab={setActiveTab}
|
||||
value={query}
|
||||
eventsText={
|
||||
usedEvents.length ? `${usedEvents.length} ${t('Events')}` : `0 ${t('Events')}`
|
||||
usedEvents.length
|
||||
? `${usedEvents.length} ${t('Events')}`
|
||||
: `0 ${t('Events')}`
|
||||
}
|
||||
/>
|
||||
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>{t('Cancel')}</Button>
|
||||
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>
|
||||
{t('Cancel')}
|
||||
</Button>
|
||||
</div>
|
||||
: null}
|
||||
) : null}
|
||||
</div>
|
||||
<div
|
||||
className={cn('flex-1 pb-4', styles.eventsList)}
|
||||
|
|
|
|||
|
|
@ -6,9 +6,11 @@ import {
|
|||
import { observer } from 'mobx-react-lite';
|
||||
import stl from './timeline.module.css';
|
||||
import { getTimelinePosition } from './getTimelinePosition';
|
||||
import { useStore } from '@/mstore';
|
||||
|
||||
function EventsList() {
|
||||
const { store } = useContext(PlayerContext);
|
||||
const { uiPlayerStore } = useStore();
|
||||
|
||||
const { eventCount, endTime } = store.get();
|
||||
const { tabStates } = store.get();
|
||||
|
|
@ -17,7 +19,6 @@ function EventsList() {
|
|||
() => Object.values(tabStates)[0]?.eventList.filter((e) => e.time) || [],
|
||||
[eventCount],
|
||||
);
|
||||
|
||||
React.useEffect(() => {
|
||||
const hasDuplicates = events.some(
|
||||
(e, i) =>
|
||||
|
|
|
|||
|
|
@ -49,7 +49,6 @@
|
|||
z-index: 2;
|
||||
}
|
||||
|
||||
|
||||
.event {
|
||||
position: absolute;
|
||||
width: 2px;
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ function SubHeader(props) {
|
|||
projectsStore,
|
||||
userStore,
|
||||
issueReportingStore,
|
||||
settingsStore
|
||||
} = useStore();
|
||||
const { t } = useTranslation();
|
||||
const { favorite } = sessionStore.current;
|
||||
|
|
@ -45,7 +46,7 @@ function SubHeader(props) {
|
|||
const currentSession = sessionStore.current;
|
||||
const projectId = projectsStore.siteId;
|
||||
const integrations = integrationsStore.issues.list;
|
||||
const { store } = React.useContext(PlayerContext);
|
||||
const { player, store } = React.useContext(PlayerContext);
|
||||
const { location: currentLocation = 'loading...' } = store.get();
|
||||
const hasIframe = localStorage.getItem(IFRAME) === 'true';
|
||||
const [hideTools, setHideTools] = React.useState(false);
|
||||
|
|
@ -127,6 +128,13 @@ function SubHeader(props) {
|
|||
});
|
||||
};
|
||||
|
||||
const showVModeBadge = store.get().vModeBadge;
|
||||
const onVMode = () => {
|
||||
settingsStore.sessionSettings.updateKey('virtualMode', true);
|
||||
player.enableVMode?.();
|
||||
location.reload();
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
|
|
@ -143,6 +151,8 @@ function SubHeader(props) {
|
|||
siteId={projectId!}
|
||||
currentLocation={currentLocation}
|
||||
version={currentSession?.trackerVersion ?? ''}
|
||||
virtualElsFailed={showVModeBadge}
|
||||
onVMode={onVMode}
|
||||
/>
|
||||
|
||||
<SessionTabs />
|
||||
|
|
|
|||
|
|
@ -34,38 +34,46 @@ const WarnBadge = React.memo(
|
|||
currentLocation,
|
||||
version,
|
||||
siteId,
|
||||
virtualElsFailed,
|
||||
onVMode,
|
||||
}: {
|
||||
currentLocation: string;
|
||||
version: string;
|
||||
siteId: string;
|
||||
virtualElsFailed: boolean;
|
||||
onVMode: () => void;
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const localhostWarnSiteKey = localhostWarn(siteId);
|
||||
const defaultLocalhostWarn =
|
||||
localStorage.getItem(localhostWarnSiteKey) !== '1';
|
||||
const localhostWarnActive =
|
||||
const localhostWarnActive = Boolean(
|
||||
currentLocation &&
|
||||
defaultLocalhostWarn &&
|
||||
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation);
|
||||
/(localhost)|(127.0.0.1)|(0.0.0.0)/.test(currentLocation)
|
||||
)
|
||||
const trackerVersion = window.env.TRACKER_VERSION ?? undefined;
|
||||
const trackerVerDiff = compareVersions(version, trackerVersion);
|
||||
const trackerWarnActive = trackerVerDiff !== VersionComparison.Same;
|
||||
|
||||
const [showLocalhostWarn, setLocalhostWarn] =
|
||||
React.useState(localhostWarnActive);
|
||||
const [showTrackerWarn, setTrackerWarn] = React.useState(trackerWarnActive);
|
||||
const [warnings, setWarnings] = React.useState<[localhostWarn: boolean, trackerWarn: boolean, virtualElsFailWarn: boolean]>([localhostWarnActive, trackerWarnActive, virtualElsFailed])
|
||||
|
||||
const closeWarning = (type: 1 | 2) => {
|
||||
React.useEffect(() => {
|
||||
setWarnings([localhostWarnActive, trackerWarnActive, virtualElsFailed])
|
||||
}, [localhostWarnActive, trackerWarnActive, virtualElsFailed])
|
||||
|
||||
const closeWarning = (type: 0 | 1 | 2) => {
|
||||
if (type === 1) {
|
||||
localStorage.setItem(localhostWarnSiteKey, '1');
|
||||
setLocalhostWarn(false);
|
||||
}
|
||||
if (type === 2) {
|
||||
setTrackerWarn(false);
|
||||
}
|
||||
setWarnings((prev) => {
|
||||
const newWarnings = [...prev];
|
||||
newWarnings[type] = false;
|
||||
return newWarnings;
|
||||
});
|
||||
};
|
||||
|
||||
if (!showLocalhostWarn && !showTrackerWarn) return null;
|
||||
if (!warnings.some(el => el === true)) return null;
|
||||
|
||||
return (
|
||||
<div
|
||||
|
|
@ -79,7 +87,7 @@ const WarnBadge = React.memo(
|
|||
fontWeight: 500,
|
||||
}}
|
||||
>
|
||||
{showLocalhostWarn ? (
|
||||
{warnings[0] ? (
|
||||
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
|
||||
<div>
|
||||
<span>{t('Some assets may load incorrectly on localhost.')}</span>
|
||||
|
|
@ -101,7 +109,7 @@ const WarnBadge = React.memo(
|
|||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
{showTrackerWarn ? (
|
||||
{warnings[1] ? (
|
||||
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
|
||||
<div>
|
||||
<div>
|
||||
|
|
@ -125,6 +133,21 @@ const WarnBadge = React.memo(
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="py-1 ml-3 cursor-pointer"
|
||||
onClick={() => closeWarning(1)}
|
||||
>
|
||||
<Icon name="close" size={16} color="black" />
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
{warnings[2] ? (
|
||||
<div className="px-3 py-1 border border-gray-lighter drop-shadow-md rounded bg-active-blue flex items-center justify-between">
|
||||
<div className="flex flex-col">
|
||||
<div>{t('If you have issues displaying custom HTML elements (i.e when using LWC), consider turning on Virtual Mode.')}</div>
|
||||
<div className='link' onClick={onVMode}>{t('Enable')}</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className="py-1 ml-3 cursor-pointer"
|
||||
onClick={() => closeWarning(2)}
|
||||
|
|
|
|||
|
|
@ -12,60 +12,123 @@ import {
|
|||
getDateRangeFromValue,
|
||||
getDateRangeLabel,
|
||||
} from 'App/dateRange';
|
||||
import { DateTime, Interval } from 'luxon';
|
||||
import { DateTime, Interval, Settings } from 'luxon';
|
||||
|
||||
import styles from './dateRangePopup.module.css';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
function DateRangePopup(props: any) {
|
||||
const { t } = useTranslation();
|
||||
const [displayDates, setDisplayDates] = React.useState<[Date, Date]>([new Date(), new Date()]);
|
||||
const [range, setRange] = React.useState(
|
||||
props.selectedDateRange ||
|
||||
Interval.fromDateTimes(DateTime.now(), DateTime.now()),
|
||||
);
|
||||
const [value, setValue] = React.useState<string | null>(null);
|
||||
|
||||
const selectCustomRange = (range) => {
|
||||
let newRange;
|
||||
if (props.singleDay) {
|
||||
newRange = Interval.fromDateTimes(
|
||||
DateTime.fromJSDate(range),
|
||||
DateTime.fromJSDate(range),
|
||||
);
|
||||
} else {
|
||||
newRange = Interval.fromDateTimes(
|
||||
DateTime.fromJSDate(range[0]),
|
||||
DateTime.fromJSDate(range[1]),
|
||||
);
|
||||
}
|
||||
setRange(newRange);
|
||||
React.useEffect(() => {
|
||||
if (props.selectedDateRange) {
|
||||
const start = new Date(
|
||||
props.selectedDateRange.start.year,
|
||||
props.selectedDateRange.start.month - 1, // JS months are 0-based
|
||||
props.selectedDateRange.start.day
|
||||
);
|
||||
const end = new Date(
|
||||
props.selectedDateRange.end.year,
|
||||
props.selectedDateRange.end.month - 1,
|
||||
props.selectedDateRange.end.day
|
||||
);
|
||||
setDisplayDates([start, end]);
|
||||
}
|
||||
}, [props.selectedDateRange]);
|
||||
|
||||
const createNaiveTime = (dateTime: DateTime) => {
|
||||
if (!dateTime) return null;
|
||||
return DateTime.fromObject({
|
||||
hour: dateTime.hour,
|
||||
minute: dateTime.minute
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
const selectCustomRange = (newDates: [Date, Date]) => {
|
||||
if (!newDates || !newDates[0] || !newDates[1]) return;
|
||||
|
||||
setDisplayDates(newDates);
|
||||
|
||||
const selectedTzStart = DateTime.fromObject({
|
||||
year: newDates[0].getFullYear(),
|
||||
month: newDates[0].getMonth() + 1,
|
||||
day: newDates[0].getDate(),
|
||||
hour: 0,
|
||||
minute: 0
|
||||
}).setZone(Settings.defaultZone);
|
||||
|
||||
const selectedTzEnd = DateTime.fromObject({
|
||||
year: newDates[1].getFullYear(),
|
||||
month: newDates[1].getMonth() + 1,
|
||||
day: newDates[1].getDate(),
|
||||
hour: 23,
|
||||
minute: 59
|
||||
}).setZone(Settings.defaultZone);
|
||||
|
||||
const updatedRange = Interval.fromDateTimes(selectedTzStart, selectedTzEnd);
|
||||
setRange(updatedRange);
|
||||
setValue(CUSTOM_RANGE);
|
||||
};
|
||||
|
||||
const setRangeTimeStart = (value: DateTime) => {
|
||||
if (!range.end || value > range.end) {
|
||||
return;
|
||||
}
|
||||
const newRange = range.start.set({
|
||||
hour: value.hour,
|
||||
minute: value.minute,
|
||||
const setRangeTimeStart = (naiveTime: DateTime) => {
|
||||
if (!range.end || !naiveTime) return;
|
||||
|
||||
const newStart = range.start.set({
|
||||
hour: naiveTime.hour,
|
||||
minute: naiveTime.minute
|
||||
});
|
||||
setRange(Interval.fromDateTimes(newRange, range.end));
|
||||
|
||||
if (newStart > range.end) return;
|
||||
|
||||
setRange(Interval.fromDateTimes(newStart, range.end));
|
||||
setValue(CUSTOM_RANGE);
|
||||
};
|
||||
|
||||
const setRangeTimeEnd = (value: DateTime) => {
|
||||
if (!range.start || (value && value < range.start)) {
|
||||
return;
|
||||
}
|
||||
const newRange = range.end.set({ hour: value.hour, minute: value.minute });
|
||||
setRange(Interval.fromDateTimes(range.start, newRange));
|
||||
const setRangeTimeEnd = (naiveTime: DateTime) => {
|
||||
if (!range.start || !naiveTime) return;
|
||||
|
||||
const newEnd = range.end.set({
|
||||
hour: naiveTime.hour,
|
||||
minute: naiveTime.minute
|
||||
});
|
||||
|
||||
if (newEnd < range.start) return;
|
||||
|
||||
setRange(Interval.fromDateTimes(range.start, newEnd));
|
||||
setValue(CUSTOM_RANGE);
|
||||
};
|
||||
|
||||
const selectValue = (value: string) => {
|
||||
const range = getDateRangeFromValue(value);
|
||||
setRange(range);
|
||||
const newRange = getDateRangeFromValue(value);
|
||||
|
||||
if (!newRange.start || !newRange.end) {
|
||||
setRange(Interval.fromDateTimes(DateTime.now(), DateTime.now()));
|
||||
setDisplayDates([new Date(), new Date()]);
|
||||
setValue(null);
|
||||
return;
|
||||
}
|
||||
const zonedStart = newRange.start.setZone(Settings.defaultZone);
|
||||
const zonedEnd = newRange.end.setZone(Settings.defaultZone);
|
||||
setRange(Interval.fromDateTimes(zonedStart, zonedEnd));
|
||||
|
||||
const start = new Date(
|
||||
zonedStart.year,
|
||||
zonedStart.month - 1,
|
||||
zonedStart.day
|
||||
);
|
||||
const end = new Date(
|
||||
zonedEnd.year,
|
||||
zonedEnd.month - 1,
|
||||
zonedEnd.day
|
||||
);
|
||||
setDisplayDates([start, end]);
|
||||
setValue(value);
|
||||
};
|
||||
|
||||
|
|
@ -77,9 +140,9 @@ function DateRangePopup(props: any) {
|
|||
const isUSLocale =
|
||||
navigator.language === 'en-US' || navigator.language.startsWith('en-US');
|
||||
|
||||
const rangeForDisplay = props.singleDay
|
||||
? range.start.ts
|
||||
: [range.start!.startOf('day').ts, range.end!.startOf('day').ts];
|
||||
const naiveStartTime = createNaiveTime(range.start);
|
||||
const naiveEndTime = createNaiveTime(range.end);
|
||||
|
||||
return (
|
||||
<div className={styles.wrapper}>
|
||||
<div className={`${styles.body} h-fit`}>
|
||||
|
|
@ -103,7 +166,7 @@ function DateRangePopup(props: any) {
|
|||
shouldCloseCalendar={() => false}
|
||||
isOpen
|
||||
maxDate={new Date()}
|
||||
value={rangeForDisplay}
|
||||
value={displayDates}
|
||||
calendarProps={{
|
||||
tileDisabled: props.isTileDisabled,
|
||||
selectRange: !props.singleDay,
|
||||
|
|
@ -122,7 +185,7 @@ function DateRangePopup(props: any) {
|
|||
<span>{range.start.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
|
||||
<TimePicker
|
||||
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
|
||||
value={range.start}
|
||||
value={naiveStartTime}
|
||||
onChange={setRangeTimeStart}
|
||||
needConfirm={false}
|
||||
showNow={false}
|
||||
|
|
@ -132,7 +195,7 @@ function DateRangePopup(props: any) {
|
|||
<span>{range.end.toFormat(isUSLocale ? 'MM/dd' : 'dd/MM')} </span>
|
||||
<TimePicker
|
||||
format={isUSLocale ? 'hh:mm a' : 'HH:mm'}
|
||||
value={range.end}
|
||||
value={naiveEndTime}
|
||||
onChange={setRangeTimeEnd}
|
||||
needConfirm={false}
|
||||
showNow={false}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,17 @@
|
|||
/* eslint-disable i18next/no-literal-string */
|
||||
import { ResourceType, Timed } from 'Player';
|
||||
import { WsChannel } from 'Player/web/messages';
|
||||
import MobilePlayer from 'Player/mobile/IOSPlayer';
|
||||
import WebPlayer from 'Player/web/WebPlayer';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import React, {
|
||||
useMemo,
|
||||
useState,
|
||||
useEffect,
|
||||
useCallback,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import i18n from 'App/i18n'
|
||||
|
||||
import { useModal } from 'App/components/Modal';
|
||||
import {
|
||||
|
|
@ -12,25 +20,27 @@ import {
|
|||
} from 'App/components/Session/playerContext';
|
||||
import { formatMs } from 'App/date';
|
||||
import { useStore } from 'App/mstore';
|
||||
import { formatBytes } from 'App/utils';
|
||||
import { formatBytes, debounceCall } from 'App/utils';
|
||||
import { Icon, NoContent, Tabs } from 'UI';
|
||||
import { Tooltip, Input, Switch, Form } from 'antd';
|
||||
import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons';
|
||||
import {
|
||||
SearchOutlined,
|
||||
InfoCircleOutlined,
|
||||
} from '@ant-design/icons';
|
||||
|
||||
import FetchDetailsModal from 'Shared/FetchDetailsModal';
|
||||
import { WsChannel } from 'App/player/web/messages';
|
||||
|
||||
import BottomBlock from '../BottomBlock';
|
||||
import InfoLine from '../BottomBlock/InfoLine';
|
||||
import TabSelector from '../TabSelector';
|
||||
import TimeTable from '../TimeTable';
|
||||
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
|
||||
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
|
||||
import WSPanel from './WSPanel';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { mergeListsWithZoom, processInChunks } from './utils'
|
||||
|
||||
// Constants remain the same
|
||||
const INDEX_KEY = 'network';
|
||||
|
||||
const ALL = 'ALL';
|
||||
const XHR = 'xhr';
|
||||
const JS = 'js';
|
||||
|
|
@ -62,6 +72,9 @@ export const NETWORK_TABS = TAP_KEYS.map((tab) => ({
|
|||
const DOM_LOADED_TIME_COLOR = 'teal';
|
||||
const LOAD_TIME_COLOR = 'red';
|
||||
|
||||
const BATCH_SIZE = 2500;
|
||||
const INITIAL_LOAD_SIZE = 5000;
|
||||
|
||||
export function renderType(r: any) {
|
||||
return (
|
||||
<Tooltip style={{ width: '100%' }} title={<div>{r.type}</div>}>
|
||||
|
|
@ -79,13 +92,17 @@ export function renderName(r: any) {
|
|||
}
|
||||
|
||||
function renderSize(r: any) {
|
||||
const { t } = useTranslation();
|
||||
if (r.responseBodySize) return formatBytes(r.responseBodySize);
|
||||
const t = i18n.t;
|
||||
const notCaptured = t('Not captured');
|
||||
const resSizeStr = t('Resource size')
|
||||
let triggerText;
|
||||
let content;
|
||||
if (r.decodedBodySize == null || r.decodedBodySize === 0) {
|
||||
if (r.responseBodySize) {
|
||||
triggerText = formatBytes(r.responseBodySize);
|
||||
content = undefined;
|
||||
} else if (r.decodedBodySize == null || r.decodedBodySize === 0) {
|
||||
triggerText = 'x';
|
||||
content = t('Not captured');
|
||||
content = notCaptured;
|
||||
} else {
|
||||
const headerSize = r.headerSize || 0;
|
||||
const showTransferred = r.headerSize != null;
|
||||
|
|
@ -100,7 +117,7 @@ function renderSize(r: any) {
|
|||
)} transferred over network`}
|
||||
</li>
|
||||
)}
|
||||
<li>{`${t('Resource size')}: ${formatBytes(r.decodedBodySize)} `}</li>
|
||||
<li>{`${resSizeStr}: ${formatBytes(r.decodedBodySize)} `}</li>
|
||||
</ul>
|
||||
);
|
||||
}
|
||||
|
|
@ -168,6 +185,8 @@ function renderStatus({
|
|||
);
|
||||
}
|
||||
|
||||
|
||||
// Main component for Network Panel
|
||||
function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
||||
const { player, store } = React.useContext(PlayerContext);
|
||||
const { sessionStore, uiPlayerStore } = useStore();
|
||||
|
|
@ -216,6 +235,7 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
|
||||
const getTabNum = (tab: string) => tabsArr.findIndex((t) => t === tab) + 1;
|
||||
const getTabName = (tabId: string) => tabNames[tabId];
|
||||
|
||||
return (
|
||||
<NetworkPanelComp
|
||||
loadTime={loadTime}
|
||||
|
|
@ -228,8 +248,8 @@ function NetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
resourceListNow={resourceListNow}
|
||||
player={player}
|
||||
startedAt={startedAt}
|
||||
websocketList={websocketList as WSMessage[]}
|
||||
websocketListNow={websocketListNow as WSMessage[]}
|
||||
websocketList={websocketList}
|
||||
websocketListNow={websocketListNow}
|
||||
getTabNum={getTabNum}
|
||||
getTabName={getTabName}
|
||||
showSingleTab={showSingleTab}
|
||||
|
|
@ -269,9 +289,7 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
resourceListNow={resourceListNow}
|
||||
player={player}
|
||||
startedAt={startedAt}
|
||||
// @ts-ignore
|
||||
websocketList={websocketList}
|
||||
// @ts-ignore
|
||||
websocketListNow={websocketListNow}
|
||||
zoomEnabled={zoomEnabled}
|
||||
zoomStartTs={zoomStartTs}
|
||||
|
|
@ -280,12 +298,35 @@ function MobileNetworkPanelCont({ panelHeight }: { panelHeight: number }) {
|
|||
);
|
||||
}
|
||||
|
||||
type WSMessage = Timed & {
|
||||
channelName: string;
|
||||
data: string;
|
||||
timestamp: number;
|
||||
dir: 'up' | 'down';
|
||||
messageType: string;
|
||||
const useInfiniteScroll = (loadMoreCallback: () => void, hasMore: boolean) => {
|
||||
const observerRef = useRef<IntersectionObserver>(null);
|
||||
const loadingRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const observer = new IntersectionObserver(
|
||||
(entries) => {
|
||||
if (entries[0]?.isIntersecting && hasMore) {
|
||||
loadMoreCallback();
|
||||
}
|
||||
},
|
||||
{ threshold: 0.1 },
|
||||
);
|
||||
|
||||
if (loadingRef.current) {
|
||||
observer.observe(loadingRef.current);
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
observerRef.current = observer;
|
||||
|
||||
return () => {
|
||||
if (observerRef.current) {
|
||||
observerRef.current.disconnect();
|
||||
}
|
||||
};
|
||||
}, [loadMoreCallback, hasMore, loadingRef]);
|
||||
|
||||
return loadingRef;
|
||||
};
|
||||
|
||||
interface Props {
|
||||
|
|
@ -302,8 +343,8 @@ interface Props {
|
|||
resourceList: Timed[];
|
||||
fetchListNow: Timed[];
|
||||
resourceListNow: Timed[];
|
||||
websocketList: Array<WSMessage>;
|
||||
websocketListNow: Array<WSMessage>;
|
||||
websocketList: Array<WsChannel>;
|
||||
websocketListNow: Array<WsChannel>;
|
||||
player: WebPlayer | MobilePlayer;
|
||||
startedAt: number;
|
||||
isMobile?: boolean;
|
||||
|
|
@ -349,107 +390,189 @@ export const NetworkPanelComp = observer(
|
|||
>(null);
|
||||
const { showModal } = useModal();
|
||||
const [showOnlyErrors, setShowOnlyErrors] = useState(false);
|
||||
|
||||
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isProcessing, setIsProcessing] = useState(false);
|
||||
const [displayedItems, setDisplayedItems] = useState([]);
|
||||
const [totalItems, setTotalItems] = useState(0);
|
||||
const [summaryStats, setSummaryStats] = useState({
|
||||
resourcesSize: 0,
|
||||
transferredSize: 0,
|
||||
});
|
||||
|
||||
const originalListRef = useRef([]);
|
||||
const socketListRef = useRef([]);
|
||||
|
||||
const {
|
||||
sessionStore: { devTools },
|
||||
} = useStore();
|
||||
const { filter } = devTools[INDEX_KEY];
|
||||
const { activeTab } = devTools[INDEX_KEY];
|
||||
const activeIndex = activeOutsideIndex ?? devTools[INDEX_KEY].index;
|
||||
const [inputFilterValue, setInputFilterValue] = useState(filter);
|
||||
|
||||
const socketList = useMemo(
|
||||
() =>
|
||||
websocketList.filter(
|
||||
(ws, i, arr) =>
|
||||
arr.findIndex((it) => it.channelName === ws.channelName) === i,
|
||||
),
|
||||
[websocketList],
|
||||
const debouncedFilter = useCallback(
|
||||
debounceCall((filterValue) => {
|
||||
devTools.update(INDEX_KEY, { filter: filterValue });
|
||||
}, 300),
|
||||
[],
|
||||
);
|
||||
|
||||
const list = useMemo(
|
||||
() =>
|
||||
// TODO: better merge (with body size info) - do it in player
|
||||
resourceList
|
||||
.filter(
|
||||
(res) =>
|
||||
!fetchList.some((ft) => {
|
||||
// res.url !== ft.url doesn't work on relative URLs appearing within fetchList (to-fix in player)
|
||||
if (res.name === ft.name) {
|
||||
if (res.time === ft.time) return true;
|
||||
if (res.url.includes(ft.url)) {
|
||||
return (
|
||||
Math.abs(res.time - ft.time) < 350 ||
|
||||
Math.abs(res.timestamp - ft.timestamp) < 350
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (res.name !== ft.name) {
|
||||
return false;
|
||||
}
|
||||
if (Math.abs(res.time - ft.time) > 250) {
|
||||
return false;
|
||||
} // TODO: find good epsilons
|
||||
if (Math.abs(res.duration - ft.duration) > 200) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}),
|
||||
)
|
||||
.concat(fetchList)
|
||||
.concat(
|
||||
socketList.map((ws) => ({
|
||||
...ws,
|
||||
type: 'websocket',
|
||||
method: 'ws',
|
||||
url: ws.channelName,
|
||||
name: ws.channelName,
|
||||
status: '101',
|
||||
duration: 0,
|
||||
transferredBodySize: 0,
|
||||
})),
|
||||
)
|
||||
.filter((req) =>
|
||||
zoomEnabled
|
||||
? req.time >= zoomStartTs! && req.time <= zoomEndTs!
|
||||
: true,
|
||||
)
|
||||
.sort((a, b) => a.time - b.time),
|
||||
[resourceList.length, fetchList.length, socketList.length],
|
||||
);
|
||||
|
||||
let filteredList = useMemo(() => {
|
||||
if (!showOnlyErrors) {
|
||||
return list;
|
||||
}
|
||||
return list.filter(
|
||||
(it) => parseInt(it.status) >= 400 || !it.success || it.error,
|
||||
// Process socket lists once
|
||||
useEffect(() => {
|
||||
const uniqueSocketList = websocketList.filter(
|
||||
(ws, i, arr) =>
|
||||
arr.findIndex((it) => it.channelName === ws.channelName) === i,
|
||||
);
|
||||
}, [showOnlyErrors, list]);
|
||||
filteredList = useRegExListFilterMemo(
|
||||
filteredList,
|
||||
(it) => [it.status, it.name, it.type, it.method],
|
||||
filter,
|
||||
);
|
||||
filteredList = useTabListFilterMemo(
|
||||
filteredList,
|
||||
(it) => TYPE_TO_TAB[it.type],
|
||||
ALL,
|
||||
activeTab,
|
||||
);
|
||||
socketListRef.current = uniqueSocketList;
|
||||
}, [websocketList.length]);
|
||||
|
||||
const onTabClick = (activeTab: (typeof TAP_KEYS)[number]) =>
|
||||
// Initial data processing - do this only once when data changes
|
||||
useEffect(() => {
|
||||
setIsLoading(true);
|
||||
|
||||
// Heaviest operation here, will create a final merged network list
|
||||
const processData = async () => {
|
||||
const fetchUrls = new Set(
|
||||
fetchList.map((ft) => {
|
||||
return `${ft.name}-${Math.floor(ft.time / 100)}-${Math.floor(ft.duration / 100)}`;
|
||||
}),
|
||||
);
|
||||
|
||||
// We want to get resources that aren't in fetch list
|
||||
const filteredResources = await processInChunks(resourceList, (chunk) =>
|
||||
chunk.filter((res: any) => {
|
||||
const key = `${res.name}-${Math.floor(res.time / 100)}-${Math.floor(res.duration / 100)}`;
|
||||
return !fetchUrls.has(key);
|
||||
}),
|
||||
BATCH_SIZE,
|
||||
25,
|
||||
);
|
||||
|
||||
const processedSockets = socketListRef.current.map((ws: any) => ({
|
||||
...ws,
|
||||
type: 'websocket',
|
||||
method: 'ws',
|
||||
url: ws.channelName,
|
||||
name: ws.channelName,
|
||||
status: '101',
|
||||
duration: 0,
|
||||
transferredBodySize: 0,
|
||||
}));
|
||||
|
||||
const mergedList: Timed[] = mergeListsWithZoom(
|
||||
filteredResources as Timed[],
|
||||
fetchList,
|
||||
processedSockets as Timed[],
|
||||
{ enabled: Boolean(zoomEnabled), start: zoomStartTs ?? 0, end: zoomEndTs ?? 0 }
|
||||
)
|
||||
|
||||
originalListRef.current = mergedList;
|
||||
setTotalItems(mergedList.length);
|
||||
|
||||
calculateResourceStats(resourceList);
|
||||
|
||||
// Only display initial chunk
|
||||
setDisplayedItems(mergedList.slice(0, INITIAL_LOAD_SIZE));
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
void processData();
|
||||
}, [
|
||||
resourceList.length,
|
||||
fetchList.length,
|
||||
socketListRef.current.length,
|
||||
zoomEnabled,
|
||||
zoomStartTs,
|
||||
zoomEndTs,
|
||||
]);
|
||||
|
||||
const calculateResourceStats = (resourceList: Record<string, any>) => {
|
||||
setTimeout(() => {
|
||||
let resourcesSize = 0
|
||||
let transferredSize = 0
|
||||
resourceList.forEach(({ decodedBodySize, headerSize, encodedBodySize }: any) => {
|
||||
resourcesSize += decodedBodySize || 0
|
||||
transferredSize += (headerSize || 0) + (encodedBodySize || 0)
|
||||
})
|
||||
|
||||
setSummaryStats({
|
||||
resourcesSize,
|
||||
transferredSize,
|
||||
});
|
||||
}, 0);
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (originalListRef.current.length === 0) return;
|
||||
setIsProcessing(true);
|
||||
const applyFilters = async () => {
|
||||
let filteredItems: any[] = originalListRef.current;
|
||||
|
||||
filteredItems = await processInChunks(filteredItems, (chunk) =>
|
||||
chunk.filter(
|
||||
(it) => {
|
||||
let valid = true;
|
||||
if (showOnlyErrors) {
|
||||
valid = parseInt(it.status) >= 400 || !it.success || it.error
|
||||
}
|
||||
if (filter) {
|
||||
try {
|
||||
const regex = new RegExp(filter, 'i');
|
||||
valid = valid && regex.test(it.status) || regex.test(it.name) || regex.test(it.type) || regex.test(it.method);
|
||||
} catch (e) {
|
||||
valid = valid && String(it.status).includes(filter) || it.name.includes(filter) || it.type.includes(filter) || (it.method && it.method.includes(filter));
|
||||
}
|
||||
}
|
||||
if (activeTab !== ALL) {
|
||||
valid = valid && TYPE_TO_TAB[it.type] === activeTab;
|
||||
}
|
||||
|
||||
return valid;
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
// Update displayed items
|
||||
setDisplayedItems(filteredItems.slice(0, INITIAL_LOAD_SIZE));
|
||||
setTotalItems(filteredItems.length);
|
||||
setIsProcessing(false);
|
||||
};
|
||||
|
||||
void applyFilters();
|
||||
}, [filter, activeTab, showOnlyErrors]);
|
||||
|
||||
const loadMoreItems = useCallback(() => {
|
||||
if (isProcessing) return;
|
||||
|
||||
setIsProcessing(true);
|
||||
setTimeout(() => {
|
||||
setDisplayedItems((prevItems) => {
|
||||
const currentLength = prevItems.length;
|
||||
const newItems = originalListRef.current.slice(
|
||||
currentLength,
|
||||
currentLength + BATCH_SIZE,
|
||||
);
|
||||
return [...prevItems, ...newItems];
|
||||
});
|
||||
setIsProcessing(false);
|
||||
}, 10);
|
||||
}, [isProcessing]);
|
||||
|
||||
const hasMoreItems = displayedItems.length < totalItems;
|
||||
const loadingRef = useInfiniteScroll(loadMoreItems, hasMoreItems);
|
||||
|
||||
const onTabClick = (activeTab) => {
|
||||
devTools.update(INDEX_KEY, { activeTab });
|
||||
const onFilterChange = ({
|
||||
target: { value },
|
||||
}: React.ChangeEvent<HTMLInputElement>) =>
|
||||
devTools.update(INDEX_KEY, { filter: value });
|
||||
};
|
||||
|
||||
const onFilterChange = ({ target: { value } }) => {
|
||||
setInputFilterValue(value)
|
||||
debouncedFilter(value);
|
||||
};
|
||||
|
||||
// AutoScroll
|
||||
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
|
||||
filteredList,
|
||||
displayedItems,
|
||||
getLastItemTime(fetchListNow, resourceListNow),
|
||||
activeIndex,
|
||||
(index) => devTools.update(INDEX_KEY, { index }),
|
||||
|
|
@ -462,24 +585,6 @@ export const NetworkPanelComp = observer(
|
|||
timeoutStartAutoscroll();
|
||||
};
|
||||
|
||||
const resourcesSize = useMemo(
|
||||
() =>
|
||||
resourceList.reduce(
|
||||
(sum, { decodedBodySize }) => sum + (decodedBodySize || 0),
|
||||
0,
|
||||
),
|
||||
[resourceList.length],
|
||||
);
|
||||
const transferredSize = useMemo(
|
||||
() =>
|
||||
resourceList.reduce(
|
||||
(sum, { headerSize, encodedBodySize }) =>
|
||||
sum + (headerSize || 0) + (encodedBodySize || 0),
|
||||
0,
|
||||
),
|
||||
[resourceList.length],
|
||||
);
|
||||
|
||||
const referenceLines = useMemo(() => {
|
||||
const arr = [];
|
||||
|
||||
|
|
@ -513,7 +618,7 @@ export const NetworkPanelComp = observer(
|
|||
isSpot={isSpot}
|
||||
time={item.time + startedAt}
|
||||
resource={item}
|
||||
rows={filteredList}
|
||||
rows={displayedItems}
|
||||
fetchPresented={fetchList.length > 0}
|
||||
/>,
|
||||
{
|
||||
|
|
@ -525,12 +630,10 @@ export const NetworkPanelComp = observer(
|
|||
},
|
||||
},
|
||||
);
|
||||
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
|
||||
stopAutoscroll();
|
||||
};
|
||||
|
||||
const tableCols = React.useMemo(() => {
|
||||
const cols: any[] = [
|
||||
const tableCols = useMemo(() => {
|
||||
const cols = [
|
||||
{
|
||||
label: t('Status'),
|
||||
dataKey: 'status',
|
||||
|
|
@ -585,7 +688,7 @@ export const NetworkPanelComp = observer(
|
|||
});
|
||||
}
|
||||
return cols;
|
||||
}, [showSingleTab]);
|
||||
}, [showSingleTab, activeTab, t, getTabName, getTabNum, isSpot]);
|
||||
|
||||
return (
|
||||
<BottomBlock
|
||||
|
|
@ -617,7 +720,7 @@ export const NetworkPanelComp = observer(
|
|||
name="filter"
|
||||
onChange={onFilterChange}
|
||||
width={280}
|
||||
value={filter}
|
||||
value={inputFilterValue}
|
||||
size="small"
|
||||
prefix={<SearchOutlined className="text-neutral-400" />}
|
||||
/>
|
||||
|
|
@ -625,7 +728,7 @@ export const NetworkPanelComp = observer(
|
|||
</BottomBlock.Header>
|
||||
<BottomBlock.Content>
|
||||
<div className="flex items-center justify-between px-4 border-b bg-teal/5 h-8">
|
||||
<div>
|
||||
<div className="flex items-center">
|
||||
<Form.Item name="show-errors-only" className="mb-0">
|
||||
<label
|
||||
style={{
|
||||
|
|
@ -642,21 +745,29 @@ export const NetworkPanelComp = observer(
|
|||
<span className="text-sm ms-2">4xx-5xx Only</span>
|
||||
</label>
|
||||
</Form.Item>
|
||||
|
||||
{isProcessing && (
|
||||
<span className="text-xs text-gray-500 ml-4">
|
||||
Processing data...
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<InfoLine>
|
||||
<InfoLine.Point label={`${totalItems}`} value="requests" />
|
||||
<InfoLine.Point
|
||||
label={`${filteredList.length}`}
|
||||
value=" requests"
|
||||
label={`${displayedItems.length}/${totalItems}`}
|
||||
value="displayed"
|
||||
display={displayedItems.length < totalItems}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatBytes(transferredSize)}
|
||||
label={formatBytes(summaryStats.transferredSize)}
|
||||
value="transferred"
|
||||
display={transferredSize > 0}
|
||||
display={summaryStats.transferredSize > 0}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatBytes(resourcesSize)}
|
||||
label={formatBytes(summaryStats.resourcesSize)}
|
||||
value="resources"
|
||||
display={resourcesSize > 0}
|
||||
display={summaryStats.resourcesSize > 0}
|
||||
/>
|
||||
<InfoLine.Point
|
||||
label={formatMs(domBuildingTime)}
|
||||
|
|
@ -679,42 +790,67 @@ export const NetworkPanelComp = observer(
|
|||
/>
|
||||
</InfoLine>
|
||||
</div>
|
||||
<NoContent
|
||||
title={
|
||||
<div className="capitalize flex items-center gap-2">
|
||||
<InfoCircleOutlined size={18} />
|
||||
{t('No Data')}
|
||||
|
||||
{isLoading ? (
|
||||
<div className="flex items-center justify-center h-full">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-gray-900 mx-auto mb-2"></div>
|
||||
<p>Processing initial network data...</p>
|
||||
</div>
|
||||
}
|
||||
size="small"
|
||||
show={filteredList.length === 0}
|
||||
>
|
||||
{/* @ts-ignore */}
|
||||
<TimeTable
|
||||
rows={filteredList}
|
||||
tableHeight={panelHeight - 102}
|
||||
referenceLines={referenceLines}
|
||||
renderPopup
|
||||
onRowClick={showDetailsModal}
|
||||
sortBy="time"
|
||||
sortAscending
|
||||
onJump={(row: any) => {
|
||||
devTools.update(INDEX_KEY, {
|
||||
index: filteredList.indexOf(row),
|
||||
});
|
||||
player.jump(row.time);
|
||||
}}
|
||||
activeIndex={activeIndex}
|
||||
</div>
|
||||
) : (
|
||||
<NoContent
|
||||
title={
|
||||
<div className="capitalize flex items-center gap-2">
|
||||
<InfoCircleOutlined size={18} />
|
||||
{t('No Data')}
|
||||
</div>
|
||||
}
|
||||
size="small"
|
||||
show={displayedItems.length === 0}
|
||||
>
|
||||
{tableCols}
|
||||
</TimeTable>
|
||||
{selectedWsChannel ? (
|
||||
<WSPanel
|
||||
socketMsgList={selectedWsChannel}
|
||||
onClose={() => setSelectedWsChannel(null)}
|
||||
/>
|
||||
) : null}
|
||||
</NoContent>
|
||||
<div>
|
||||
<TimeTable
|
||||
rows={displayedItems}
|
||||
tableHeight={panelHeight - 102 - (hasMoreItems ? 30 : 0)}
|
||||
referenceLines={referenceLines}
|
||||
renderPopup
|
||||
onRowClick={showDetailsModal}
|
||||
sortBy="time"
|
||||
sortAscending
|
||||
onJump={(row) => {
|
||||
devTools.update(INDEX_KEY, {
|
||||
index: displayedItems.indexOf(row),
|
||||
});
|
||||
player.jump(row.time);
|
||||
}}
|
||||
activeIndex={activeIndex}
|
||||
>
|
||||
{tableCols}
|
||||
</TimeTable>
|
||||
|
||||
{hasMoreItems && (
|
||||
<div
|
||||
ref={loadingRef}
|
||||
className="flex justify-center items-center text-xs text-gray-500"
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-gray-600 mr-2"></div>
|
||||
Loading more data ({totalItems - displayedItems.length}{' '}
|
||||
remaining)
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{selectedWsChannel ? (
|
||||
<WSPanel
|
||||
socketMsgList={selectedWsChannel}
|
||||
onClose={() => setSelectedWsChannel(null)}
|
||||
/>
|
||||
) : null}
|
||||
</NoContent>
|
||||
)}
|
||||
</BottomBlock.Content>
|
||||
</BottomBlock>
|
||||
);
|
||||
|
|
@ -722,7 +858,6 @@ export const NetworkPanelComp = observer(
|
|||
);
|
||||
|
||||
const WebNetworkPanel = observer(NetworkPanelCont);
|
||||
|
||||
const MobileNetworkPanel = observer(MobileNetworkPanelCont);
|
||||
|
||||
export { WebNetworkPanel, MobileNetworkPanel };
|
||||
|
|
|
|||
178
frontend/app/components/shared/DevTools/NetworkPanel/utils.ts
Normal file
178
frontend/app/components/shared/DevTools/NetworkPanel/utils.ts
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
export function mergeListsWithZoom<
|
||||
T extends Record<string, any>,
|
||||
Y extends Record<string, any>,
|
||||
Z extends Record<string, any>,
|
||||
>(
|
||||
arr1: T[],
|
||||
arr2: Y[],
|
||||
arr3: Z[],
|
||||
zoom?: { enabled: boolean; start: number; end: number },
|
||||
): Array<T | Y | Z> {
|
||||
// Early return for empty arrays
|
||||
if (arr1.length === 0 && arr2.length === 0 && arr3.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Optimized for common case - no zoom
|
||||
if (!zoom?.enabled) {
|
||||
return mergeThreeSortedArrays(arr1, arr2, arr3);
|
||||
}
|
||||
|
||||
// Binary search for start indexes (faster than linear search for large arrays)
|
||||
const index1 = binarySearchStartIndex(arr1, zoom.start);
|
||||
const index2 = binarySearchStartIndex(arr2, zoom.start);
|
||||
const index3 = binarySearchStartIndex(arr3, zoom.start);
|
||||
|
||||
// Merge arrays within zoom range
|
||||
return mergeThreeSortedArraysWithinRange(
|
||||
arr1,
|
||||
arr2,
|
||||
arr3,
|
||||
index1,
|
||||
index2,
|
||||
index3,
|
||||
zoom.start,
|
||||
zoom.end,
|
||||
);
|
||||
}
|
||||
|
||||
function binarySearchStartIndex<T extends Record<string, any>>(
|
||||
arr: T[],
|
||||
threshold: number,
|
||||
): number {
|
||||
if (arr.length === 0) return 0;
|
||||
|
||||
let low = 0;
|
||||
let high = arr.length - 1;
|
||||
|
||||
// Handle edge cases first for better performance
|
||||
if (arr[high].time < threshold) return arr.length;
|
||||
if (arr[low].time >= threshold) return 0;
|
||||
|
||||
while (low <= high) {
|
||||
const mid = Math.floor((low + high) / 2);
|
||||
|
||||
if (arr[mid].time < threshold) {
|
||||
low = mid + 1;
|
||||
} else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
|
||||
return low;
|
||||
}
|
||||
|
||||
function mergeThreeSortedArrays<
|
||||
T extends Record<string, any>,
|
||||
Y extends Record<string, any>,
|
||||
Z extends Record<string, any>,
|
||||
>(arr1: T[], arr2: Y[], arr3: Z[]): Array<T | Y | Z> {
|
||||
const totalLength = arr1.length + arr2.length + arr3.length;
|
||||
// prealloc array size
|
||||
const result = new Array(totalLength);
|
||||
|
||||
let i = 0,
|
||||
j = 0,
|
||||
k = 0,
|
||||
index = 0;
|
||||
|
||||
while (i < arr1.length || j < arr2.length || k < arr3.length) {
|
||||
const val1 = i < arr1.length ? arr1[i].time : Infinity;
|
||||
const val2 = j < arr2.length ? arr2[j].time : Infinity;
|
||||
const val3 = k < arr3.length ? arr3[k].time : Infinity;
|
||||
|
||||
if (val1 <= val2 && val1 <= val3) {
|
||||
result[index++] = arr1[i++];
|
||||
} else if (val2 <= val1 && val2 <= val3) {
|
||||
result[index++] = arr2[j++];
|
||||
} else {
|
||||
result[index++] = arr3[k++];
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// same as above, just with zoom stuff
|
||||
function mergeThreeSortedArraysWithinRange<
|
||||
T extends Record<string, any>,
|
||||
Y extends Record<string, any>,
|
||||
Z extends Record<string, any>,
|
||||
>(
|
||||
arr1: T[],
|
||||
arr2: Y[],
|
||||
arr3: Z[],
|
||||
startIdx1: number,
|
||||
startIdx2: number,
|
||||
startIdx3: number,
|
||||
start: number,
|
||||
end: number,
|
||||
): Array<T | Y | Z> {
|
||||
// we don't know beforehand how many items will be there
|
||||
const result = [];
|
||||
|
||||
let i = startIdx1;
|
||||
let j = startIdx2;
|
||||
let k = startIdx3;
|
||||
|
||||
while (i < arr1.length || j < arr2.length || k < arr3.length) {
|
||||
const val1 = i < arr1.length ? arr1[i].time : Infinity;
|
||||
const val2 = j < arr2.length ? arr2[j].time : Infinity;
|
||||
const val3 = k < arr3.length ? arr3[k].time : Infinity;
|
||||
|
||||
// Early termination: if all remaining values exceed end time
|
||||
if (Math.min(val1, val2, val3) > end) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (val1 <= val2 && val1 <= val3) {
|
||||
if (val1 <= end) {
|
||||
result.push(arr1[i]);
|
||||
}
|
||||
i++;
|
||||
} else if (val2 <= val1 && val2 <= val3) {
|
||||
if (val2 <= end) {
|
||||
result.push(arr2[j]);
|
||||
}
|
||||
j++;
|
||||
} else {
|
||||
if (val3 <= end) {
|
||||
result.push(arr3[k]);
|
||||
}
|
||||
k++;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function processInChunks(
|
||||
items: any[],
|
||||
processFn: (item: any) => any,
|
||||
chunkSize = 1000,
|
||||
overscan = 0,
|
||||
) {
|
||||
return new Promise((resolve) => {
|
||||
if (items.length === 0) {
|
||||
resolve([]);
|
||||
return;
|
||||
}
|
||||
|
||||
let result: any[] = [];
|
||||
let index = 0;
|
||||
|
||||
const processNextChunk = () => {
|
||||
const chunk = items.slice(index, index + chunkSize + overscan);
|
||||
result = result.concat(processFn(chunk));
|
||||
index += chunkSize;
|
||||
|
||||
if (index < items.length) {
|
||||
setTimeout(processNextChunk, 0);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
};
|
||||
|
||||
processNextChunk();
|
||||
});
|
||||
}
|
||||
|
|
@ -5,6 +5,7 @@ import cn from 'classnames';
|
|||
import { Loader } from 'UI';
|
||||
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { VList } from 'virtua';
|
||||
|
||||
function TruncatedText({
|
||||
text,
|
||||
|
|
@ -124,7 +125,7 @@ export function AutocompleteModal({
|
|||
if (index === blocksAmount - 1 && blocksAmount > 1) {
|
||||
str += ' and ';
|
||||
}
|
||||
str += `"${block.trim()}"`;
|
||||
str += block.trim();
|
||||
if (index < blocksAmount - 2) {
|
||||
str += ', ';
|
||||
}
|
||||
|
|
@ -170,25 +171,27 @@ export function AutocompleteModal({
|
|||
<>
|
||||
<div
|
||||
className="flex flex-col gap-2 overflow-y-auto py-2 overflow-x-hidden text-ellipsis"
|
||||
style={{ maxHeight: 200 }}
|
||||
style={{ height: Math.min(sortedOptions.length * 32, 240) }}
|
||||
>
|
||||
{sortedOptions.map((item) => (
|
||||
<div
|
||||
key={item.value}
|
||||
onClick={() => onSelectOption(item)}
|
||||
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
|
||||
>
|
||||
<Checkbox checked={isSelected(item)} /> {item.label}
|
||||
</div>
|
||||
))}
|
||||
<VList count={sortedOptions.length} itemSize={18}>
|
||||
{sortedOptions.map((item) => (
|
||||
<div
|
||||
key={item.value}
|
||||
onClick={() => onSelectOption(item)}
|
||||
className="cursor-pointer w-full py-1 hover:bg-active-blue rounded px-2"
|
||||
>
|
||||
<Checkbox checked={isSelected(item)} /> {item.label}
|
||||
</div>
|
||||
))}
|
||||
</VList>
|
||||
</div>
|
||||
{query.length ? (
|
||||
<div className="border-y border-y-gray-light py-2">
|
||||
<div
|
||||
className="whitespace-normal rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
|
||||
className="whitespace-nowrap truncate w-full rounded cursor-pointer text-teal hover:bg-active-blue px-2 py-1"
|
||||
onClick={applyQuery}
|
||||
>
|
||||
{t('Apply')} {queryStr}
|
||||
{t('Apply')} <span className='font-semibold'>{queryStr}</span>
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
|
|
|
|||
|
|
@ -128,8 +128,10 @@ const FilterAutoComplete = observer(
|
|||
};
|
||||
|
||||
const handleFocus = () => {
|
||||
if (!initialFocus) {
|
||||
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
|
||||
}
|
||||
setInitialFocus(true);
|
||||
setOptions(topValues.map((i) => ({ value: i.value, label: i.value })));
|
||||
};
|
||||
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -19,11 +19,13 @@ export default function MetaItem(props: Props) {
|
|||
<TextEllipsis
|
||||
text={label}
|
||||
className="p-0"
|
||||
maxWidth={'300px'}
|
||||
popupProps={{ size: 'small', disabled: true }}
|
||||
/>
|
||||
<span className="bg-neutral-200 inline-block w-[1px] min-h-[17px]"></span>
|
||||
<TextEllipsis
|
||||
text={value}
|
||||
maxWidth={'350px'}
|
||||
className="p-0 text-neutral-500"
|
||||
popupProps={{ size: 'small', disabled: true }}
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -7,13 +7,15 @@ interface Props {
|
|||
className?: string;
|
||||
metaList: any[];
|
||||
maxLength?: number;
|
||||
onMetaClick?: (meta: { name: string, value: string }) => void;
|
||||
horizontal?: boolean;
|
||||
}
|
||||
|
||||
export default function SessionMetaList(props: Props) {
|
||||
const { className = '', metaList, maxLength = 14 } = props;
|
||||
const { className = '', metaList, maxLength = 14, horizontal = false } = props;
|
||||
|
||||
return (
|
||||
<div className={cn('flex items-center flex-wrap gap-1', className)}>
|
||||
<div className={cn('flex items-center gap-1', horizontal ? '' : 'flex-wrap', className)}>
|
||||
{metaList.slice(0, maxLength).map(({ label, value }, index) => (
|
||||
<React.Fragment key={index}>
|
||||
<MetaItem label={label} value={`${value}`} />
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import ListingVisibility from './components/ListingVisibility';
|
|||
import DefaultPlaying from './components/DefaultPlaying';
|
||||
import DefaultTimezone from './components/DefaultTimezone';
|
||||
import CaptureRate from './components/CaptureRate';
|
||||
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
function SessionSettings() {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,30 @@
|
|||
import React from 'react';
|
||||
import { useStore } from 'App/mstore';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import { Switch } from 'UI';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
/**
 * Session-settings toggle for "Virtual Mode", persisted via the settings
 * store under the `virtualMode` key.
 */
function VirtualModeSettings() {
  const { settingsStore } = useStore();
  const { sessionSettings } = settingsStore;
  const { virtualMode } = sessionSettings;
  const { t } = useTranslation();

  // Persist the Switch's reported state. Using `checked` (instead of the
  // previous `!virtualMode`) keeps the stored value in sync with the control
  // even if this closure captured a stale `virtualMode`.
  const updateSettings = (checked: boolean) => {
    settingsStore.sessionSettings.updateKey('virtualMode', checked);
  };

  return (
    <div>
      <h3 className="text-lg">{t('Virtual Mode')}</h3>
      <div className="my-1">
        {t('Change this setting if you have issues with recordings containing Lightning Web Components (or similar custom HTML Element libraries).')}
      </div>
      <div className="mt-2">
        <Switch onChange={updateSettings} checked={virtualMode} />
      </div>
    </div>
  );
}

export default observer(VirtualModeSettings);
|
||||
|
|
@ -9,6 +9,7 @@ export const GLOBAL_HAS_NO_RECORDINGS = '__$global-hasNoRecordings$__';
|
|||
export const SITE_ID_STORAGE_KEY = '__$user-siteId$__';
|
||||
export const GETTING_STARTED = '__$user-gettingStarted$__';
|
||||
export const MOUSE_TRAIL = '__$session-mouseTrail$__';
|
||||
export const VIRTUAL_MODE_KEY = '__$session-virtualMode$__'
|
||||
export const IFRAME = '__$session-iframe$__';
|
||||
export const JWT_PARAM = '__$session-jwt-param$__';
|
||||
export const MENU_COLLAPSED = '__$global-menuCollapsed$__';
|
||||
|
|
|
|||
|
|
@ -503,7 +503,7 @@
|
|||
"Returning users between": "Returning users between",
|
||||
"Sessions": "Sessions",
|
||||
"No recordings found.": "No recordings found.",
|
||||
"Get new session": "Get new session",
|
||||
"Get new image": "Get new image",
|
||||
"The number of cards in one dashboard is limited to 30.": "The number of cards in one dashboard is limited to 30.",
|
||||
"Add Card": "Add Card",
|
||||
"Create Dashboard": "Create Dashboard",
|
||||
|
|
|
|||
|
|
@ -503,7 +503,7 @@
|
|||
"Returning users between": "Usuarios recurrentes entre",
|
||||
"Sessions": "Sesiones",
|
||||
"No recordings found.": "No se encontraron grabaciones.",
|
||||
"Get new session": "Obtener nueva sesión",
|
||||
"Get new image": "Obtener nueva sesión",
|
||||
"The number of cards in one dashboard is limited to 30.": "El número de tarjetas en un panel está limitado a 30.",
|
||||
"Add Card": "Agregar tarjeta",
|
||||
"Create Dashboard": "Crear panel",
|
||||
|
|
|
|||
|
|
@ -503,7 +503,7 @@
|
|||
"Returning users between": "Utilisateurs récurrents entre",
|
||||
"Sessions": "Sessions",
|
||||
"No recordings found.": "Aucun enregistrement trouvé.",
|
||||
"Get new session": "Obtenir une nouvelle session",
|
||||
"Get new image": "Obtenir une nouvelle session",
|
||||
"The number of cards in one dashboard is limited to 30.": "Le nombre de cartes dans un tableau de bord est limité à 30.",
|
||||
"Add Card": "Ajouter une carte",
|
||||
"Create Dashboard": "Créer un tableau de bord",
|
||||
|
|
|
|||
|
|
@ -504,7 +504,7 @@
|
|||
"Returning users between": "Возвращающиеся пользователи за период",
|
||||
"Sessions": "Сессии",
|
||||
"No recordings found.": "Записей не найдено.",
|
||||
"Get new session": "Получить новую сессию",
|
||||
"Get new image": "Получить новую сессию",
|
||||
"The number of cards in one dashboard is limited to 30.": "Количество карточек в одном дашборде ограничено 30.",
|
||||
"Add Card": "Добавить карточку",
|
||||
"Create Dashboard": "Создать дашборд",
|
||||
|
|
@ -1498,5 +1498,8 @@
|
|||
"More attribute": "Еще атрибут",
|
||||
"More attributes": "Еще атрибуты",
|
||||
"Account settings updated successfully": "Настройки аккаунта успешно обновлены",
|
||||
"Include rage clicks": "Включить невыносимые клики"
|
||||
}
|
||||
"Include rage clicks": "Включить невыносимые клики",
|
||||
"Interface Language": "Язык интерфейса",
|
||||
"Select the language in which OpenReplay will appear.": "Выберите язык, на котором будет отображаться OpenReplay.",
|
||||
"Language": "Язык"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -503,7 +503,7 @@
|
|||
"Returning users between": "回访用户区间",
|
||||
"Sessions": "会话",
|
||||
"No recordings found.": "未找到录制。",
|
||||
"Get new session": "获取新会话",
|
||||
"Get new image": "获取新会话",
|
||||
"The number of cards in one dashboard is limited to 30.": "一个仪表板最多可包含30个卡片。",
|
||||
"Add Card": "添加卡片",
|
||||
"Create Dashboard": "创建仪表板",
|
||||
|
|
|
|||
|
|
@ -1,11 +1,13 @@
|
|||
import { makeAutoObservable, runInAction } from 'mobx';
|
||||
import { makeAutoObservable, runInAction, reaction } from 'mobx';
|
||||
import { dashboardService, metricService } from 'App/services';
|
||||
import { toast } from 'react-toastify';
|
||||
import Period, { LAST_24_HOURS, LAST_7_DAYS } from 'Types/app/period';
|
||||
import Period, { LAST_24_HOURS } from 'Types/app/period';
|
||||
import { getRE } from 'App/utils';
|
||||
import Filter from './types/filter';
|
||||
import Widget from './types/widget';
|
||||
import Dashboard from './types/dashboard';
|
||||
import { calculateGranularities } from '@/components/Dashboard/components/WidgetDateRange/RangeGranularity';
|
||||
import { CUSTOM_RANGE } from '@/dateRange';
|
||||
|
||||
interface DashboardFilter {
|
||||
query?: string;
|
||||
|
|
@ -34,9 +36,9 @@ export default class DashboardStore {
|
|||
|
||||
comparisonFilter: Filter = new Filter();
|
||||
|
||||
drillDownPeriod: Record<string, any> = Period({ rangeName: LAST_7_DAYS });
|
||||
drillDownPeriod: Record<string, any> = Period({ rangeName: LAST_24_HOURS });
|
||||
|
||||
selectedDensity: number = 7; // depends on default drilldown, 7 points here!!!;
|
||||
selectedDensity: number = 7;
|
||||
|
||||
comparisonPeriods: Record<string, any> = {};
|
||||
|
||||
|
|
@ -83,10 +85,29 @@ export default class DashboardStore {
|
|||
makeAutoObservable(this);
|
||||
|
||||
this.resetDrillDownFilter();
|
||||
|
||||
this.createDensity(this.period.getDuration());
|
||||
reaction(
|
||||
() => this.period,
|
||||
(period) => {
|
||||
this.createDensity(period.getDuration());
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
setDensity = (density: any) => {
|
||||
this.selectedDensity = parseInt(density, 10);
|
||||
resetDensity = () => {
|
||||
this.createDensity(this.period.getDuration());
|
||||
};
|
||||
|
||||
createDensity = (duration: number) => {
|
||||
const densityOpts = calculateGranularities(duration);
|
||||
const defaultOption = densityOpts[densityOpts.length - 2];
|
||||
|
||||
this.setDensity(defaultOption.key);
|
||||
};
|
||||
|
||||
setDensity = (density: number) => {
|
||||
this.selectedDensity = density;
|
||||
};
|
||||
|
||||
get sortedDashboards() {
|
||||
|
|
@ -446,7 +467,7 @@ export default class DashboardStore {
|
|||
this.isSaving = true;
|
||||
try {
|
||||
try {
|
||||
const response = await dashboardService.addWidget(dashboard, metricIds);
|
||||
await dashboardService.addWidget(dashboard, metricIds);
|
||||
toast.success('Card added to dashboard.');
|
||||
} catch {
|
||||
toast.error('Card could not be added.');
|
||||
|
|
@ -456,6 +477,17 @@ export default class DashboardStore {
|
|||
}
|
||||
}
|
||||
|
||||
resetPeriod = () => {
|
||||
if (this.period) {
|
||||
const range = this.period.rangeName;
|
||||
if (range !== CUSTOM_RANGE) {
|
||||
this.period = Period({ rangeName: this.period.rangeName });
|
||||
} else {
|
||||
this.period = Period({ rangeName: LAST_24_HOURS });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
setPeriod(period: any) {
|
||||
this.period = Period({
|
||||
start: period.start,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import { makeAutoObservable } from 'mobx';
|
||||
import { issueReportsService } from 'App/services';
|
||||
import { makePersistable } from '.store/mobx-persist-store-virtual-858ce4d906/package';
|
||||
import ReportedIssue from '../types/session/assignment';
|
||||
|
||||
export default class IssueReportingStore {
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import {
|
|||
SITE_ID_STORAGE_KEY,
|
||||
} from 'App/constants/storageKeys';
|
||||
import { projectsService } from 'App/services';
|
||||
import { toast } from '.store/react-toastify-virtual-9dd0f3eae1/package';
|
||||
import GDPR from './types/gdpr';
|
||||
import Project from './types/project';
|
||||
|
||||
|
|
|
|||
|
|
@ -390,10 +390,11 @@ class SearchStore {
|
|||
// TODO
|
||||
}
|
||||
|
||||
async fetchSessions(
|
||||
fetchSessions = async (
|
||||
force: boolean = false,
|
||||
bookmarked: boolean = false,
|
||||
): Promise<void> {
|
||||
): Promise<void> => {
|
||||
console.log(this.searchInProgress)
|
||||
if (this.searchInProgress) return;
|
||||
const filter = this.instance.toSearch();
|
||||
|
||||
|
|
|
|||
|
|
@ -220,6 +220,7 @@ class SearchStoreLive {
|
|||
updateFilter = (index: number, search: Partial<IFilter>) => {
|
||||
const newFilters = this.instance.filters.map((_filter: any, i: any) => {
|
||||
if (i === index) {
|
||||
search.value = checkFilterValue(search.value);
|
||||
return search;
|
||||
}
|
||||
return _filter;
|
||||
|
|
|
|||
|
|
@ -15,9 +15,7 @@ import { loadFile } from 'App/player/web/network/loadFiles';
|
|||
import { LAST_7_DAYS } from 'Types/app/period';
|
||||
import { filterMap } from 'App/mstore/searchStore';
|
||||
import { getDateRangeFromValue } from 'App/dateRange';
|
||||
import { clean as cleanParams } from '../api_client';
|
||||
import { searchStore, searchStoreLive } from './index';
|
||||
|
||||
const range = getDateRangeFromValue(LAST_7_DAYS);
|
||||
|
||||
const defaultDateFilters = {
|
||||
|
|
|
|||
|
|
@ -157,7 +157,7 @@ export default class FilterItem {
|
|||
const json = {
|
||||
type: isMetadata ? FilterKey.METADATA : this.key,
|
||||
isEvent: Boolean(this.isEvent),
|
||||
value: this.value.map((i: any) => (i ? i.toString() : '')),
|
||||
value: this.value?.map((i: any) => (i ? i.toString() : '')) || [],
|
||||
operator: this.operator,
|
||||
source: isMetadata ? this.key.replace(/^_/, '') : this.source,
|
||||
sourceOperator: this.sourceOperator,
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import Filter, { IFilter } from 'App/mstore/types/filter';
|
|||
import FilterItem from 'App/mstore/types/filterItem';
|
||||
import { makeAutoObservable, observable } from 'mobx';
|
||||
import { LAST_24_HOURS, LAST_30_DAYS, LAST_7_DAYS } from 'Types/app/period';
|
||||
import { roundToNextMinutes } from '@/utils';
|
||||
|
||||
// @ts-ignore
|
||||
const rangeValue = DATE_RANGE_VALUES.LAST_24_HOURS;
|
||||
|
|
@ -177,6 +178,7 @@ export default class Search {
|
|||
js.rangeValue,
|
||||
js.startDate,
|
||||
js.endDate,
|
||||
15,
|
||||
);
|
||||
js.startDate = startDate;
|
||||
js.endDate = endDate;
|
||||
|
|
@ -190,12 +192,11 @@ export default class Search {
|
|||
rangeName: string,
|
||||
customStartDate: number,
|
||||
customEndDate: number,
|
||||
): {
|
||||
startDate: number;
|
||||
endDate: number;
|
||||
} {
|
||||
roundMinutes?: number,
|
||||
): { startDate: number; endDate: number } {
|
||||
let endDate = new Date().getTime();
|
||||
let startDate: number;
|
||||
const minutes = roundMinutes || 15;
|
||||
|
||||
switch (rangeName) {
|
||||
case LAST_7_DAYS:
|
||||
|
|
@ -206,9 +207,7 @@ export default class Search {
|
|||
break;
|
||||
case CUSTOM_RANGE:
|
||||
if (!customStartDate || !customEndDate) {
|
||||
throw new Error(
|
||||
'Start date and end date must be provided for CUSTOM_RANGE.',
|
||||
);
|
||||
throw new Error('Start date and end date must be provided for CUSTOM_RANGE.');
|
||||
}
|
||||
startDate = customStartDate;
|
||||
endDate = customEndDate;
|
||||
|
|
@ -218,10 +217,12 @@ export default class Search {
|
|||
startDate = endDate - 24 * 60 * 60 * 1000;
|
||||
}
|
||||
|
||||
return {
|
||||
startDate,
|
||||
endDate,
|
||||
};
|
||||
if (rangeName !== CUSTOM_RANGE) {
|
||||
startDate = roundToNextMinutes(startDate, minutes);
|
||||
endDate = roundToNextMinutes(endDate, minutes);
|
||||
}
|
||||
|
||||
return { startDate, endDate };
|
||||
}
|
||||
|
||||
fromJS({ eventsOrder, filters, events, custom, ...filterData }: any) {
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import {
|
|||
SHOWN_TIMEZONE,
|
||||
DURATION_FILTER,
|
||||
MOUSE_TRAIL,
|
||||
VIRTUAL_MODE_KEY,
|
||||
} from 'App/constants/storageKeys';
|
||||
import { DateTime, Settings } from 'luxon';
|
||||
|
||||
|
|
@ -71,27 +72,19 @@ export const generateGMTZones = (): Timezone[] => {
|
|||
|
||||
export default class SessionSettings {
|
||||
defaultTimezones = [...generateGMTZones()];
|
||||
|
||||
skipToIssue: boolean = localStorage.getItem(SKIP_TO_ISSUE) === 'true';
|
||||
|
||||
timezone: Timezone;
|
||||
|
||||
durationFilter: any = JSON.parse(
|
||||
localStorage.getItem(DURATION_FILTER) ||
|
||||
JSON.stringify(defaultDurationFilter),
|
||||
);
|
||||
|
||||
captureRate: string = '0';
|
||||
|
||||
conditionalCapture: boolean = false;
|
||||
|
||||
captureConditions: { name: string; captureRate: number; filters: any[] }[] =
|
||||
[];
|
||||
|
||||
mouseTrail: boolean = localStorage.getItem(MOUSE_TRAIL) !== 'false';
|
||||
|
||||
shownTimezone: 'user' | 'local';
|
||||
|
||||
virtualMode: boolean = localStorage.getItem(VIRTUAL_MODE_KEY) === 'true';
|
||||
usingLocal: boolean = false;
|
||||
|
||||
constructor() {
|
||||
|
|
|
|||
|
|
@ -163,6 +163,7 @@ export default class Widget {
|
|||
fromJson(json: any, period?: any) {
|
||||
json.config = json.config || {};
|
||||
runInAction(() => {
|
||||
this.dashboardId = json.dashboardId;
|
||||
this.metricId = json.metricId;
|
||||
this.widgetId = json.widgetId;
|
||||
this.metricValue = this.metricValueFromArray(
|
||||
|
|
|
|||
|
|
@ -43,6 +43,7 @@ export default class MessageLoader {
|
|||
this.session = session;
|
||||
}
|
||||
|
||||
rawMessages: any[] = []
|
||||
createNewParser(
|
||||
shouldDecrypt = true,
|
||||
onMessagesDone: (msgs: PlayerMsg[], file?: string) => void,
|
||||
|
|
@ -69,6 +70,7 @@ export default class MessageLoader {
|
|||
while (!finished) {
|
||||
const msg = fileReader.readNext();
|
||||
if (msg) {
|
||||
this.rawMessages.push(msg)
|
||||
msgs.push(msg);
|
||||
} else {
|
||||
finished = true;
|
||||
|
|
@ -78,7 +80,6 @@ export default class MessageLoader {
|
|||
|
||||
let artificialStartTime = Infinity;
|
||||
let startTimeSet = false;
|
||||
|
||||
msgs.forEach((msg, i) => {
|
||||
if (msg.tp === MType.Redux || msg.tp === MType.ReduxDeprecated) {
|
||||
if ('actionTime' in msg && msg.actionTime) {
|
||||
|
|
@ -343,27 +344,32 @@ const DOMMessages = [
|
|||
MType.CreateElementNode,
|
||||
MType.CreateTextNode,
|
||||
MType.MoveNode,
|
||||
MType.RemoveNode,
|
||||
MType.CreateIFrameDocument,
|
||||
];
|
||||
|
||||
// fixed times: 3
|
||||
function brokenDomSorter(m1: PlayerMsg, m2: PlayerMsg) {
|
||||
if (m1.time !== m2.time) return m1.time - m2.time;
|
||||
|
||||
if (m1.tp === MType.CreateDocument && m2.tp !== MType.CreateDocument)
|
||||
return -1;
|
||||
if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
|
||||
return 1;
|
||||
// if (m1.tp === MType.CreateDocument && m2.tp !== MType.CreateDocument)
|
||||
// return -1;
|
||||
// if (m1.tp !== MType.CreateDocument && m2.tp === MType.CreateDocument)
|
||||
// return 1;
|
||||
|
||||
const m1IsDOM = DOMMessages.includes(m1.tp);
|
||||
const m2IsDOM = DOMMessages.includes(m2.tp);
|
||||
if (m1IsDOM && m2IsDOM) {
|
||||
// @ts-ignore DOM msg has id but checking for 'id' in m is expensive
|
||||
return m1.id - m2.id;
|
||||
}
|
||||
// if (m1.tp === MType.RemoveNode)
|
||||
// return 1;
|
||||
// if (m2.tp === MType.RemoveNode)
|
||||
// return -1;
|
||||
|
||||
if (m1IsDOM && !m2IsDOM) return -1;
|
||||
if (!m1IsDOM && m2IsDOM) return 1;
|
||||
// const m1IsDOM = DOMMessages.includes(m1.tp);
|
||||
// const m2IsDOM = DOMMessages.includes(m2.tp);
|
||||
// if (m1IsDOM && m2IsDOM) {
|
||||
// // @ts-ignore DOM msg has id but checking for 'id' in m is expensive
|
||||
// return m1.id - m2.id;
|
||||
// }
|
||||
|
||||
// if (m1IsDOM && !m2IsDOM) return -1;
|
||||
// if (!m1IsDOM && m2IsDOM) return 1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// @ts-ignore
|
||||
import { Decoder } from 'syncod';
|
||||
import logger from 'App/logger';
|
||||
|
||||
import { VIRTUAL_MODE_KEY } from '@/constants/storageKeys';
|
||||
import type { Store, ILog, SessionFilesInfo } from 'Player';
|
||||
import TabSessionManager, { TabState } from 'Player/web/TabManager';
|
||||
import ActiveTabManager from 'Player/web/managers/ActiveTabManager';
|
||||
|
|
@ -69,6 +69,7 @@ export interface State extends ScreenState {
|
|||
tabChangeEvents: TabChangeEvent[];
|
||||
closedTabs: string[];
|
||||
sessionStart: number;
|
||||
vModeBadge: boolean;
|
||||
}
|
||||
|
||||
export const visualChanges = [
|
||||
|
|
@ -99,6 +100,7 @@ export default class MessageManager {
|
|||
closedTabs: [],
|
||||
sessionStart: 0,
|
||||
tabNames: {},
|
||||
vModeBadge: false,
|
||||
};
|
||||
|
||||
private clickManager: ListWalker<MouseClick> = new ListWalker();
|
||||
|
|
@ -126,7 +128,6 @@ export default class MessageManager {
|
|||
private tabsAmount = 0;
|
||||
|
||||
private tabChangeEvents: TabChangeEvent[] = [];
|
||||
|
||||
private activeTab = '';
|
||||
|
||||
constructor(
|
||||
|
|
@ -142,8 +143,19 @@ export default class MessageManager {
|
|||
this.activityManager = new ActivityManager(
|
||||
this.session.duration.milliseconds,
|
||||
); // only if not-live
|
||||
|
||||
const vMode = localStorage.getItem(VIRTUAL_MODE_KEY);
|
||||
if (vMode === 'true') {
|
||||
this.setVirtualMode(true);
|
||||
}
|
||||
}
|
||||
|
||||
private virtualMode = false;
|
||||
public setVirtualMode = (virtualMode: boolean) => {
|
||||
this.virtualMode = virtualMode;
|
||||
Object.values(this.tabs).forEach((tab) => tab.setVirtualMode(virtualMode));
|
||||
};
|
||||
|
||||
public getListsFullState = () => {
|
||||
const fullState: Record<string, any> = {};
|
||||
for (const tab in Object.keys(this.tabs)) {
|
||||
|
|
@ -394,6 +406,9 @@ export default class MessageManager {
|
|||
this.sessionStart,
|
||||
this.initialLists,
|
||||
);
|
||||
if (this.virtualMode) {
|
||||
this.tabs[msg.tabId].setVirtualMode(this.virtualMode);
|
||||
}
|
||||
}
|
||||
|
||||
const lastMessageTime = Math.max(msg.time, this.lastMessageTime);
|
||||
|
|
|
|||
|
|
@ -99,6 +99,7 @@ export default class TabSessionManager {
|
|||
tabStates: { [tabId: string]: TabState };
|
||||
tabNames: { [tabId: string]: string };
|
||||
location?: string;
|
||||
vModeBadge?: boolean;
|
||||
}>,
|
||||
private readonly screen: Screen,
|
||||
private readonly id: string,
|
||||
|
|
@ -116,6 +117,13 @@ export default class TabSessionManager {
|
|||
screen,
|
||||
this.session.isMobile,
|
||||
this.setCSSLoading,
|
||||
() => {
|
||||
setTimeout(() => {
|
||||
this.state.update({
|
||||
vModeBadge: true,
|
||||
})
|
||||
}, 0)
|
||||
}
|
||||
);
|
||||
this.lists = new Lists(initialLists);
|
||||
initialLists?.event?.forEach((e: Record<string, string>) => {
|
||||
|
|
@ -126,6 +134,10 @@ export default class TabSessionManager {
|
|||
});
|
||||
}
|
||||
|
||||
public setVirtualMode = (virtualMode: boolean) => {
|
||||
this.pagesManager.setVirtualMode(virtualMode);
|
||||
};
|
||||
|
||||
setSession = (session: any) => {
|
||||
this.session = session;
|
||||
};
|
||||
|
|
@ -348,19 +360,19 @@ export default class TabSessionManager {
|
|||
break;
|
||||
case MType.CreateTextNode:
|
||||
case MType.CreateElementNode:
|
||||
this.windowNodeCounter.addNode(msg.id, msg.parentID);
|
||||
this.windowNodeCounter.addNode(msg);
|
||||
this.performanceTrackManager.setCurrentNodesCount(
|
||||
this.windowNodeCounter.count,
|
||||
);
|
||||
break;
|
||||
case MType.MoveNode:
|
||||
this.windowNodeCounter.moveNode(msg.id, msg.parentID);
|
||||
this.windowNodeCounter.moveNode(msg);
|
||||
this.performanceTrackManager.setCurrentNodesCount(
|
||||
this.windowNodeCounter.count,
|
||||
);
|
||||
break;
|
||||
case MType.RemoveNode:
|
||||
this.windowNodeCounter.removeNode(msg.id);
|
||||
this.windowNodeCounter.removeNode(msg);
|
||||
this.performanceTrackManager.setCurrentNodesCount(
|
||||
this.windowNodeCounter.count,
|
||||
);
|
||||
|
|
|
|||
|
|
@ -21,15 +21,10 @@ export default class WebPlayer extends Player {
|
|||
inspectorMode: false,
|
||||
mobsFetched: false,
|
||||
};
|
||||
|
||||
private inspectorController: InspectorController;
|
||||
|
||||
protected screen: Screen;
|
||||
|
||||
protected readonly messageManager: MessageManager;
|
||||
|
||||
protected readonly messageLoader: MessageLoader;
|
||||
|
||||
private targetMarker: TargetMarker;
|
||||
|
||||
constructor(
|
||||
|
|
@ -104,6 +99,10 @@ export default class WebPlayer extends Player {
|
|||
window.__OPENREPLAY_DEV_TOOLS__.player = this;
|
||||
}
|
||||
|
||||
enableVMode = () => {
|
||||
this.messageManager.setVirtualMode(true);
|
||||
}
|
||||
|
||||
preloadFirstFile(data: Uint8Array, fileKey?: string) {
|
||||
void this.messageLoader.preloadFirstFile(data, fileKey);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -140,11 +140,16 @@ class SimpleHeatmap {
|
|||
ctx.drawImage(this.circle, p[0] - this.r, p[1] - this.r);
|
||||
});
|
||||
|
||||
const colored = ctx.getImageData(0, 0, this.width, this.height);
|
||||
this.colorize(colored.data, this.grad);
|
||||
ctx.putImageData(colored, 0, 0);
|
||||
|
||||
return this;
|
||||
try {
|
||||
const colored = ctx.getImageData(0, 0, this.width, this.height);
|
||||
this.colorize(colored.data, this.grad);
|
||||
ctx.putImageData(colored, 0, 0);
|
||||
} catch (e) {
|
||||
// usually happens if session is corrupted ?
|
||||
console.error('Error while colorizing heatmap:', e);
|
||||
} finally {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
private colorize(
|
||||
|
|
|
|||
|
|
@ -2,21 +2,7 @@ import logger from '@/logger';
|
|||
import { VElement } from 'Player/web/managers/DOM/VirtualDOM';
|
||||
import MessageManager from 'Player/web/MessageManager';
|
||||
import { Socket } from 'socket.io-client';
|
||||
|
||||
let frameCounter = 0;
|
||||
|
||||
function draw(
|
||||
video: HTMLVideoElement,
|
||||
canvas: HTMLCanvasElement,
|
||||
canvasCtx: CanvasRenderingContext2D,
|
||||
) {
|
||||
if (frameCounter % 4 === 0) {
|
||||
canvasCtx.drawImage(video, 0, 0, canvas.width, canvas.height);
|
||||
}
|
||||
frameCounter++;
|
||||
requestAnimationFrame(() => draw(video, canvas, canvasCtx));
|
||||
}
|
||||
|
||||
import { toast } from 'react-toastify';
|
||||
export default class CanvasReceiver {
|
||||
private streams: Map<string, MediaStream> = new Map();
|
||||
|
||||
|
|
@ -25,6 +11,16 @@ export default class CanvasReceiver {
|
|||
|
||||
private cId: string;
|
||||
|
||||
private frameCounter = 0;
|
||||
private canvasesData = new Map<
|
||||
string,
|
||||
{
|
||||
video: HTMLVideoElement;
|
||||
canvas: HTMLCanvasElement;
|
||||
canvasCtx: CanvasRenderingContext2D;
|
||||
}
|
||||
>(new Map());
|
||||
|
||||
// sendSignal – for sending signals (offer/answer/ICE)
|
||||
constructor(
|
||||
private readonly peerIdPrefix: string,
|
||||
|
|
@ -56,6 +52,14 @@ export default class CanvasReceiver {
|
|||
},
|
||||
);
|
||||
|
||||
this.socket.on('webrtc_canvas_stop', (data: { id: string }) => {
|
||||
const { id } = data;
|
||||
const canvasId = getCanvasId(id);
|
||||
this.connections.delete(id);
|
||||
this.streams.delete(id);
|
||||
this.canvasesData.delete(canvasId);
|
||||
});
|
||||
|
||||
this.socket.on('webrtc_canvas_restart', () => {
|
||||
this.clear();
|
||||
});
|
||||
|
|
@ -85,7 +89,7 @@ export default class CanvasReceiver {
|
|||
const stream = event.streams[0];
|
||||
if (stream) {
|
||||
// Detect canvasId from remote peer id
|
||||
const canvasId = id.split('-')[4];
|
||||
const canvasId = getCanvasId(id);
|
||||
this.streams.set(canvasId, stream);
|
||||
setTimeout(() => {
|
||||
const node = this.getNode(parseInt(canvasId, 10));
|
||||
|
|
@ -93,14 +97,15 @@ export default class CanvasReceiver {
|
|||
stream.clone() as MediaStream,
|
||||
node as VElement,
|
||||
);
|
||||
if (node) {
|
||||
draw(
|
||||
videoEl,
|
||||
node.node as HTMLCanvasElement,
|
||||
(node.node as HTMLCanvasElement).getContext(
|
||||
if (node && videoEl) {
|
||||
this.canvasesData.set(canvasId, {
|
||||
video: videoEl,
|
||||
canvas: node.node as HTMLCanvasElement,
|
||||
canvasCtx: (node.node as HTMLCanvasElement)?.getContext(
|
||||
'2d',
|
||||
) as CanvasRenderingContext2D,
|
||||
);
|
||||
});
|
||||
this.draw();
|
||||
} else {
|
||||
logger.log('NODE', canvasId, 'IS NOT FOUND');
|
||||
}
|
||||
|
|
@ -136,7 +141,27 @@ export default class CanvasReceiver {
|
|||
});
|
||||
this.connections.clear();
|
||||
this.streams.clear();
|
||||
this.canvasesData.clear();
|
||||
}
|
||||
|
||||
draw = () => {
|
||||
if (this.frameCounter % 4 === 0) {
|
||||
if (this.canvasesData.size === 0) {
|
||||
return;
|
||||
}
|
||||
this.canvasesData.forEach((canvasData, id) => {
|
||||
const { video, canvas, canvasCtx } = canvasData;
|
||||
const node = this.getNode(parseInt(id, 10));
|
||||
if (node) {
|
||||
canvasCtx.drawImage(video, 0, 0, canvas.width, canvas.height);
|
||||
} else {
|
||||
this.canvasesData.delete(id);
|
||||
}
|
||||
});
|
||||
}
|
||||
this.frameCounter++;
|
||||
requestAnimationFrame(() => this.draw());
|
||||
};
|
||||
}
|
||||
|
||||
function spawnVideo(stream: MediaStream, node: VElement) {
|
||||
|
|
@ -152,6 +177,10 @@ function spawnVideo(stream: MediaStream, node: VElement) {
|
|||
.play()
|
||||
.then(() => true)
|
||||
.catch(() => {
|
||||
toast.error('Click to unpause canvas stream', {
|
||||
autoClose: false,
|
||||
toastId: 'canvas-stream',
|
||||
});
|
||||
// we allow that if user just reloaded the page
|
||||
});
|
||||
|
||||
|
|
@ -164,6 +193,10 @@ function spawnVideo(stream: MediaStream, node: VElement) {
|
|||
const startStream = () => {
|
||||
videoEl
|
||||
.play()
|
||||
.then(() => {
|
||||
toast.dismiss('canvas-stream');
|
||||
clearListeners();
|
||||
})
|
||||
.then(() => console.log('unpaused'))
|
||||
.catch(() => {
|
||||
// we allow that if user just reloaded the page
|
||||
|
|
@ -179,6 +212,10 @@ function checkId(id: string, cId: string): boolean {
|
|||
return id.includes(cId);
|
||||
}
|
||||
|
||||
function getCanvasId(id: string): string {
|
||||
return id.split('-')[4];
|
||||
}
|
||||
|
||||
/** simple peer example
|
||||
* // @ts-ignore
|
||||
* const peer = new SLPeer({ initiator: false })
|
||||
|
|
|
|||
|
|
@ -17,6 +17,9 @@ export interface State {
|
|||
|
||||
export default class RemoteControl {
|
||||
private assistVersion = 1;
|
||||
private isDragging = false;
|
||||
private dragStart: any | null = null;
|
||||
private readonly dragThreshold = 3;
|
||||
|
||||
static readonly INITIAL_STATE: Readonly<State> = {
|
||||
remoteControl: RemoteControlStatus.Disabled,
|
||||
|
|
@ -81,6 +84,7 @@ export default class RemoteControl {
|
|||
}
|
||||
|
||||
private onMouseMove = (e: MouseEvent): void => {
|
||||
if (this.isDragging) return;
|
||||
const data = this.screen.getInternalCoordinates(e);
|
||||
this.emitData('move', [data.x, data.y]);
|
||||
};
|
||||
|
|
@ -154,16 +158,61 @@ export default class RemoteControl {
|
|||
this.emitData('click', [data.x, data.y]);
|
||||
};
|
||||
|
||||
private onMouseDown = (e: MouseEvent): void => {
|
||||
if (this.store.get().annotating) return;
|
||||
|
||||
const { x, y } = this.screen.getInternalViewportCoordinates(e);
|
||||
this.dragStart = [x, y];
|
||||
this.isDragging = false;
|
||||
|
||||
const handleMove = (moveEvent: MouseEvent) => {
|
||||
const { x: mx, y: my } =
|
||||
this.screen.getInternalViewportCoordinates(moveEvent);
|
||||
const [sx, sy] = this.dragStart!;
|
||||
const dx = Math.abs(mx - sx);
|
||||
const dy = Math.abs(my - sy);
|
||||
|
||||
if (
|
||||
!this.isDragging &&
|
||||
(dx > this.dragThreshold || dy > this.dragThreshold)
|
||||
) {
|
||||
this.emitData('startDrag', [sx, sy]);
|
||||
this.isDragging = true;
|
||||
}
|
||||
|
||||
if (this.isDragging) {
|
||||
this.emitData('drag', [mx, my, mx - sx, my - sy]);
|
||||
}
|
||||
};
|
||||
|
||||
const handleUp = () => {
|
||||
if (this.isDragging) {
|
||||
this.emitData('stopDrag');
|
||||
}
|
||||
|
||||
this.dragStart = null;
|
||||
this.isDragging = false;
|
||||
|
||||
window.removeEventListener('mousemove', handleMove);
|
||||
window.removeEventListener('mouseup', handleUp);
|
||||
};
|
||||
|
||||
window.addEventListener('mousemove', handleMove);
|
||||
window.addEventListener('mouseup', handleUp);
|
||||
};
|
||||
|
||||
private toggleRemoteControl(enable: boolean) {
|
||||
if (enable) {
|
||||
this.screen.overlay.addEventListener('mousemove', this.onMouseMove);
|
||||
this.screen.overlay.addEventListener('click', this.onMouseClick);
|
||||
this.screen.overlay.addEventListener('wheel', this.onWheel);
|
||||
this.screen.overlay.addEventListener('mousedown', this.onMouseDown);
|
||||
this.store.update({ remoteControl: RemoteControlStatus.Enabled });
|
||||
} else {
|
||||
this.screen.overlay.removeEventListener('mousemove', this.onMouseMove);
|
||||
this.screen.overlay.removeEventListener('click', this.onMouseClick);
|
||||
this.screen.overlay.removeEventListener('wheel', this.onWheel);
|
||||
this.screen.overlay.removeEventListener('mousedown', this.onMouseDown);
|
||||
this.store.update({ remoteControl: RemoteControlStatus.Disabled });
|
||||
this.toggleAnnotation(false);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -44,45 +44,34 @@ const ATTR_NAME_REGEXP = /([^\t\n\f \/>"'=]+)/;
|
|||
|
||||
export default class DOMManager extends ListWalker<Message> {
|
||||
private readonly vTexts: Map<number, VText> = new Map(); // map vs object here?
|
||||
|
||||
private readonly vElements: Map<number, VElement> = new Map();
|
||||
|
||||
private readonly olVRoots: Map<number, OnloadVRoot> = new Map();
|
||||
|
||||
/** required to keep track of iframes, frameId : vnodeId */
|
||||
private readonly iframeRoots: Record<number, number> = {};
|
||||
|
||||
private shadowRootParentMap: Map<number, number> = new Map();
|
||||
/** Constructed StyleSheets https://developer.mozilla.org/en-US/docs/Web/API/Document/adoptedStyleSheets
|
||||
* as well as <style> tag owned StyleSheets
|
||||
*/
|
||||
private olStyleSheets: Map<number, OnloadStyleSheet> = new Map();
|
||||
|
||||
/** @depreacted since tracker 4.0.2 Mapping by nodeID */
|
||||
private olStyleSheetsDeprecated: Map<number, OnloadStyleSheet> = new Map();
|
||||
|
||||
private upperBodyId: number = -1;
|
||||
|
||||
private nodeScrollManagers: Map<number, ListWalker<SetNodeScroll>> =
|
||||
new Map();
|
||||
|
||||
private stylesManager: StylesManager;
|
||||
|
||||
private focusManager: FocusManager = new FocusManager(this.vElements);
|
||||
|
||||
private selectionManager: SelectionManager;
|
||||
|
||||
private readonly screen: Screen;
|
||||
|
||||
private readonly isMobile: boolean;
|
||||
|
||||
private readonly stringDict: Record<number, string>;
|
||||
|
||||
private readonly globalDict: {
|
||||
get: (key: string) => string | undefined;
|
||||
all: () => Record<string, string>;
|
||||
};
|
||||
|
||||
public readonly time: number;
|
||||
private virtualMode = false;
|
||||
private hasSlots = false
|
||||
private showVModeBadge?: () => void;
|
||||
|
||||
constructor(params: {
|
||||
screen: Screen;
|
||||
|
|
@ -94,6 +83,8 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
get: (key: string) => string | undefined;
|
||||
all: () => Record<string, string>;
|
||||
};
|
||||
virtualMode?: boolean;
|
||||
showVModeBadge?: () => void;
|
||||
}) {
|
||||
super();
|
||||
this.screen = params.screen;
|
||||
|
|
@ -103,6 +94,8 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
this.globalDict = params.globalDict;
|
||||
this.selectionManager = new SelectionManager(this.vElements, params.screen);
|
||||
this.stylesManager = new StylesManager(params.screen, params.setCssLoading);
|
||||
this.virtualMode = params.virtualMode || false;
|
||||
this.showVModeBadge = params.showVModeBadge;
|
||||
setupWindowLogging(this.vTexts, this.vElements, this.olVRoots);
|
||||
}
|
||||
|
||||
|
|
@ -163,6 +156,11 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
}
|
||||
|
||||
public getNode(id: number) {
|
||||
const mappedId = this.shadowRootParentMap.get(id);
|
||||
if (mappedId !== undefined) {
|
||||
// If this is a shadow root ID, return the parent element instead
|
||||
return this.vElements.get(mappedId);
|
||||
}
|
||||
return this.vElements.get(id) || this.vTexts.get(id);
|
||||
}
|
||||
|
||||
|
|
@ -171,24 +169,21 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
id: number;
|
||||
index: number;
|
||||
}): void {
|
||||
const { parentID, id, index } = msg;
|
||||
let { parentID, id, index } = msg;
|
||||
|
||||
// Check if parentID is a shadow root, and get the real parent element if so
|
||||
const actualParentID = this.shadowRootParentMap.get(parentID);
|
||||
if (actualParentID !== undefined) {
|
||||
parentID = actualParentID;
|
||||
}
|
||||
|
||||
const child = this.vElements.get(id) || this.vTexts.get(id);
|
||||
if (!child) {
|
||||
logger.error('Insert error. Node not found', id);
|
||||
return;
|
||||
}
|
||||
|
||||
const parent = this.vElements.get(parentID) || this.olVRoots.get(parentID);
|
||||
if ('tagName' in child && child.tagName === 'BODY') {
|
||||
const spriteMap = new VSpriteMap(
|
||||
'svg',
|
||||
true,
|
||||
Number.MAX_SAFE_INTEGER - 100,
|
||||
Number.MAX_SAFE_INTEGER - 100,
|
||||
);
|
||||
spriteMap.node.setAttribute('id', 'OPENREPLAY_SPRITES_MAP');
|
||||
spriteMap.node.setAttribute('style', 'display: none;');
|
||||
child.insertChildAt(spriteMap, Number.MAX_SAFE_INTEGER - 100);
|
||||
}
|
||||
if (!parent) {
|
||||
logger.error(
|
||||
`${id} Insert error. Parent vNode ${parentID} not found`,
|
||||
|
|
@ -303,11 +298,19 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
this.insertNode(msg);
|
||||
this.removeBodyScroll(msg.id, vElem);
|
||||
this.removeAutocomplete(vElem);
|
||||
if (msg.tag === 'SLOT') {
|
||||
this.hasSlots = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
case MType.MoveNode:
|
||||
case MType.MoveNode: {
|
||||
// if the parent ID is in shadow root map -> custom elements case
|
||||
if (this.shadowRootParentMap.has(msg.parentID)) {
|
||||
msg.parentID = this.shadowRootParentMap.get(msg.parentID)!;
|
||||
}
|
||||
this.insertNode(msg);
|
||||
return;
|
||||
}
|
||||
case MType.RemoveNode: {
|
||||
const vChild = this.vElements.get(msg.id) || this.vTexts.get(msg.id);
|
||||
if (!vChild) {
|
||||
|
|
@ -440,6 +443,21 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
logger.error('CreateIFrameDocument: Node not found', msg);
|
||||
return;
|
||||
}
|
||||
// shadow DOM for a custom element + SALESFORCE (<slot>)
|
||||
const isCustomElement =
|
||||
vElem.tagName.includes('-') || vElem.tagName === 'SLOT';
|
||||
|
||||
if (isCustomElement) {
|
||||
if (this.virtualMode) {
|
||||
// Store the mapping but don't create the actual shadow root
|
||||
this.shadowRootParentMap.set(msg.id, msg.frameID);
|
||||
return;
|
||||
} else if (this.hasSlots) {
|
||||
this.showVModeBadge?.();
|
||||
}
|
||||
}
|
||||
|
||||
// Real iframes
|
||||
if (this.iframeRoots[msg.frameID] && !this.olVRoots.has(msg.id)) {
|
||||
this.olVRoots.delete(this.iframeRoots[msg.frameID]);
|
||||
}
|
||||
|
|
@ -452,7 +470,11 @@ export default class DOMManager extends ListWalker<Message> {
|
|||
case MType.AdoptedSsInsertRule: {
|
||||
const styleSheet = this.olStyleSheets.get(msg.sheetID);
|
||||
if (!styleSheet) {
|
||||
logger.warn('No stylesheet was created for ', msg);
|
||||
logger.warn(
|
||||
'No stylesheet was created for ',
|
||||
msg,
|
||||
this.olStyleSheets,
|
||||
);
|
||||
return;
|
||||
}
|
||||
insertRule(styleSheet, msg);
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ export default class PagesManager extends ListWalker<DOMManager> {
|
|||
private screen: Screen,
|
||||
private isMobile: boolean,
|
||||
private setCssLoading: (flag: boolean) => void,
|
||||
private showVModeBadge: () => void,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
|
@ -30,6 +31,10 @@ export default class PagesManager extends ListWalker<DOMManager> {
|
|||
Assumed that messages added in a correct time sequence.
|
||||
*/
|
||||
falseOrder = false;
|
||||
virtualMode = false;
|
||||
setVirtualMode = (virtualMode: boolean) => {
|
||||
this.virtualMode = virtualMode;
|
||||
};
|
||||
|
||||
appendMessage(m: Message): void {
|
||||
if ([MType.StringDict, MType.StringDictGlobal].includes(m.tp)) {
|
||||
|
|
@ -62,6 +67,8 @@ export default class PagesManager extends ListWalker<DOMManager> {
|
|||
get: (key: string) => this.globalDictionary.get(key),
|
||||
all: () => Object.fromEntries(this.globalDictionary),
|
||||
},
|
||||
virtualMode: this.virtualMode,
|
||||
showVModeBadge: this.showVModeBadge,
|
||||
}),
|
||||
);
|
||||
this.falseOrder = false;
|
||||
|
|
|
|||
|
|
@ -54,40 +54,45 @@ export default class WindowNodeCounter {
|
|||
this.nodes = [this.root];
|
||||
}
|
||||
|
||||
addNode(id: number, parentID: number) {
|
||||
addNode(msg: { id: number, parentID: number, time: number }): boolean {
|
||||
const { id, parentID } = msg;
|
||||
if (!this.nodes[parentID]) {
|
||||
// TODO: iframe case
|
||||
// console.error(`Wrong! Node with id ${ parentID } (parentId) not found.`);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
if (this.nodes[id]) {
|
||||
// console.error(`Wrong! Node with id ${ id } already exists.`);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
this.nodes[id] = this.nodes[parentID].newChild();
|
||||
return true;
|
||||
}
|
||||
|
||||
removeNode(id: number) {
|
||||
removeNode({ id }: { id: number }) {
|
||||
if (!this.nodes[id]) {
|
||||
// Might be text node
|
||||
// console.error(`Wrong! Node with id ${ id } not found.`);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
this.nodes[id].removeNode();
|
||||
return true;
|
||||
}
|
||||
|
||||
moveNode(id: number, parentId: number) {
|
||||
moveNode(msg: { id: number, parentID: number, time: number }) {
|
||||
const { id, parentID, time } = msg;
|
||||
if (!this.nodes[id]) {
|
||||
console.warn(`Node Counter: Node with id ${id} not found.`);
|
||||
return;
|
||||
console.warn(`Node Counter: Node with id ${id} (parent: ${parentID}) not found. time: ${time}`);
|
||||
return false;
|
||||
}
|
||||
if (!this.nodes[parentId]) {
|
||||
if (!this.nodes[parentID]) {
|
||||
console.warn(
|
||||
`Node Counter: Node with id ${parentId} (parentId) not found.`,
|
||||
`Node Counter: Node with id ${parentID} (parentId) not found. time: ${time}`,
|
||||
);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
this.nodes[id].moveNode(this.nodes[parentId]);
|
||||
this.nodes[id].moveNode(this.nodes[parentID]);
|
||||
return true;
|
||||
}
|
||||
|
||||
get count() {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import { DateTime, Interval, Settings } from 'luxon';
|
||||
import Record from 'Types/Record';
|
||||
import { roundToNextMinutes } from '@/utils';
|
||||
|
||||
export const LAST_30_MINUTES = 'LAST_30_MINUTES';
|
||||
export const TODAY = 'TODAY';
|
||||
|
|
@ -30,7 +31,9 @@ function getRange(rangeName, offset) {
|
|||
now.startOf('day'),
|
||||
);
|
||||
case LAST_24_HOURS:
|
||||
return Interval.fromDateTimes(now.minus({ hours: 24 }), now);
|
||||
const mod = now.minute % 15;
|
||||
const next = now.plus({ minutes: mod === 0 ? 15 : 15 - mod }).startOf('minute');
|
||||
return Interval.fromDateTimes(next.minus({ hours: 24 }), next);
|
||||
case LAST_30_MINUTES:
|
||||
return Interval.fromDateTimes(
|
||||
now.minus({ minutes: 30 }).startOf('minute'),
|
||||
|
|
|
|||
|
|
@ -178,6 +178,8 @@ export class Click extends Event {
|
|||
|
||||
selector: string;
|
||||
|
||||
isHighlighted: boolean | undefined = false;
|
||||
|
||||
constructor(evt: ClickEvent, isClickRage?: boolean) {
|
||||
super(evt);
|
||||
this.targetContent = evt.targetContent;
|
||||
|
|
|
|||
|
|
@ -29,6 +29,15 @@ export function debounce(callback, wait, context = this) {
|
|||
};
|
||||
}
|
||||
|
||||
export function debounceCall(func, wait) {
|
||||
let timeout;
|
||||
return function (...args) {
|
||||
const context = this;
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(() => func.apply(context, args), wait);
|
||||
};
|
||||
}
|
||||
|
||||
export function randomInt(a, b) {
|
||||
const min = (b ? a : 0) - 0.5;
|
||||
const max = b || a || Number.MAX_SAFE_INTEGER;
|
||||
|
|
@ -613,3 +622,14 @@ export function exportAntCsv(tableColumns, tableData, filename = 'table.csv') {
|
|||
const blob = new Blob([csvContent], { type: 'text/csv;charset=utf-8;' });
|
||||
saveAsFile(blob, filename);
|
||||
}
|
||||
|
||||
export function roundToNextMinutes(timestamp: number, minutes: number): number {
|
||||
const date = new Date(timestamp);
|
||||
date.setSeconds(0, 0);
|
||||
const currentMinutes = date.getMinutes();
|
||||
const remainder = currentMinutes % minutes;
|
||||
if (remainder !== 0) {
|
||||
date.setMinutes(currentMinutes + (minutes - remainder));
|
||||
}
|
||||
return date.getTime();
|
||||
}
|
||||
|
|
|
|||
1
scripts/docker-compose/.gitignore
vendored
Normal file
1
scripts/docker-compose/.gitignore
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
hacks/yamls
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
ASSIST_JWT_SECRET=${COMMON_JWT_SECRET}
|
||||
ASSIST_KEY=${COMMON_JWT_SECRET}
|
||||
ASSIST_RECORDS_BUCKET=records
|
||||
ASSIST_URL="http://assist-openreplay:9001/assist/%s"
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
CH_COMPRESSION="false"
|
||||
PYTHONUNBUFFERED="0"
|
||||
REDIS_STRING="redis://redis:6379"
|
||||
S3_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
SITE_URL="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
ch_host="clickhouse"
|
||||
ch_port="9000"
|
||||
ch_port_http="8123"
|
||||
ch_username="default"
|
||||
js_cache_bucket=sessions-assets
|
||||
jwt_secret="${COMMON_JWT_SECRET}"
|
||||
pg_dbname="postgres"
|
||||
pg_host="postgresql"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
sessions_bucket=mobs
|
||||
sessions_region="us-east-1"
|
||||
sourcemaps_bucket=sourcemaps
|
||||
sourcemaps_reader="http://sourcemapreader-openreplay:9000/sourcemaps/%s/sourcemaps"
|
||||
version_number="${COMMON_VERSION}"
|
||||
CLUSTER_URL=""
|
||||
POD_NAMESPACE=""
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
AWS_ACCESS_KEY_ID=${COMMON_S3_KEY}
|
||||
AWS_SECRET_ACCESS_KEY=${COMMON_S3_SECRET}
|
||||
BUCKET_NAME=sessions-assets
|
||||
LICENSE_KEY=''
|
||||
AWS_ENDPOINT='http://minio:9000'
|
||||
AWS_REGION='us-east-1'
|
||||
KAFKA_SERVERS='kafka.db.svc.cluster.local:9092'
|
||||
KAFKA_USE_SSL='false'
|
||||
ASSETS_ORIGIN='https://${COMMON_DOMAIN_NAME}:443/sessions-assets'
|
||||
REDIS_STRING='redis://redis:6379'
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
ASSIST_JWT_SECRET=${COMMON_JWT_SECRET}
|
||||
ASSIST_KEY=${COMMON_JWT_SECRET}
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
S3_HOST="https://${COMMON_DOMAIN_NAME}:443"
|
||||
S3_KEY=changeMeMinioAccessKey
|
||||
S3_SECRET=changeMeMinioPassword
|
||||
REDIS_URL=redis
|
||||
CLEAR_SOCKET_TIME='720'
|
||||
debug='0'
|
||||
redis='false'
|
||||
uws='false'
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
ASSIST_JWT_SECRET=${COMMON_JWT_SECRET}
|
||||
ASSIST_KEY=${COMMON_JWT_SECRET}
|
||||
ASSIST_RECORDS_BUCKET=records
|
||||
ASSIST_URL="http://assist-openreplay:9001/assist/%s"
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
CH_COMPRESSION="false"
|
||||
PYTHONUNBUFFERED="0"
|
||||
REDIS_STRING="redis://redis:6379"
|
||||
S3_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
SITE_URL="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
ch_host="clickhouse"
|
||||
ch_port="9000"
|
||||
ch_port_http="8123"
|
||||
ch_username="default"
|
||||
js_cache_bucket=sessions-assets
|
||||
jwt_secret="${COMMON_JWT_SECRET}"
|
||||
pg_dbname="postgres"
|
||||
pg_host="postgresql"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
sessions_bucket=mobs
|
||||
sessions_region="us-east-1"
|
||||
sourcemaps_bucket=sourcemaps
|
||||
sourcemaps_reader="http://sourcemapreader-openreplay:9000/sourcemaps/%s/sourcemaps"
|
||||
version_number="${COMMON_VERSION}"
|
||||
CLUSTER_URL=""
|
||||
POD_NAMESPACE=""
|
||||
JWT_REFRESH_SECRET=${COMMON_JWT_REFRESH_SECRET}
|
||||
JWT_SPOT_REFRESH_SECRET=${COMMON_JWT_REFRESH_SECRET}
|
||||
JWT_SPOT_SECRET=${COMMON_JWT_SPOT_SECRET}
|
||||
|
|
@ -1,15 +1,20 @@
|
|||
COMMON_VERSION="v1.22.0"
|
||||
COMMON_PROTOCOL="https"
|
||||
COMMON_DOMAIN_NAME="change_me_domain"
|
||||
COMMON_JWT_SECRET="change_me_jwt"
|
||||
COMMON_JWT_SPOT_SECRET="change_me_jwt"
|
||||
COMMON_JWT_REFRESH_SECRET="change_me_jwt_refresh"
|
||||
COMMON_S3_KEY="change_me_s3_key"
|
||||
COMMON_S3_SECRET="change_me_s3_secret"
|
||||
COMMON_PG_PASSWORD="change_me_pg_password"
|
||||
COMMON_VERSION="v1.21.0"
|
||||
COMMON_JWT_REFRESH_SECRET="change_me_jwt_refresh"
|
||||
COMMON_JWT_SPOT_REFRESH_SECRET="change_me_jwt_spot_refresh"
|
||||
COMMON_ASSIST_JWT_SECRET="change_me_assist_jwt_secret"
|
||||
COMMON_ASSIST_KEY="change_me_assist_key"
|
||||
|
||||
## DB versions
|
||||
######################################
|
||||
POSTGRES_VERSION="14.5.0"
|
||||
POSTGRES_VERSION="17.2.0"
|
||||
REDIS_VERSION="6.0.12-debian-10-r33"
|
||||
MINIO_VERSION="2023.2.10-debian-11-r1"
|
||||
CLICKHOUSE_VERSION="25.1-alpine"
|
||||
######################################
|
||||
|
|
|
|||
|
|
@ -1,11 +0,0 @@
|
|||
CH_USERNAME='default'
|
||||
CH_PASSWORD=''
|
||||
CLICKHOUSE_STRING='clickhouse-openreplay-clickhouse.db.svc.cluster.local:9000/default'
|
||||
LICENSE_KEY=''
|
||||
KAFKA_SERVERS='kafka.db.svc.cluster.local:9092'
|
||||
KAFKA_USE_SSL='false'
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
QUICKWIT_ENABLED='false'
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql:5432/postgres"
|
||||
REDIS_STRING='redis://redis:6379'
|
||||
ch_db='default'
|
||||
|
|
@ -1,15 +1,34 @@
|
|||
|
||||
# vim: ft=yaml
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
|
||||
postgresql:
|
||||
image: bitnami/postgresql:${POSTGRES_VERSION}
|
||||
container_name: postgres
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
- pgdata:/bitnami/postgresql
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- postgresql.db.svc.cluster.local
|
||||
environment:
|
||||
POSTGRESQL_PASSWORD: ${COMMON_PG_PASSWORD}
|
||||
POSTGRESQL_PASSWORD: "${COMMON_PG_PASSWORD}"
|
||||
|
||||
clickhouse:
|
||||
image: clickhouse/clickhouse-server:${CLICKHOUSE_VERSION}
|
||||
container_name: clickhouse
|
||||
volumes:
|
||||
- clickhouse:/var/lib/clickhouse
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- clickhouse-openreplay-clickhouse.db.svc.cluster.local
|
||||
environment:
|
||||
CLICKHOUSE_USER: "default"
|
||||
CLICKHOUSE_PASSWORD: ""
|
||||
CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: "1"
|
||||
|
||||
redis:
|
||||
image: bitnami/redis:${REDIS_VERSION}
|
||||
|
|
@ -17,7 +36,9 @@ services:
|
|||
volumes:
|
||||
- redisdata:/bitnami/redis/data
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- redis-master.db.svc.cluster.local
|
||||
environment:
|
||||
ALLOW_EMPTY_PASSWORD: "yes"
|
||||
|
||||
|
|
@ -27,7 +48,9 @@ services:
|
|||
volumes:
|
||||
- miniodata:/bitnami/minio/data
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- minio.db.svc.cluster.local
|
||||
ports:
|
||||
- 9001:9001
|
||||
environment:
|
||||
|
|
@ -63,7 +86,7 @@ services:
|
|||
volumes:
|
||||
- ../helmcharts/openreplay/files/minio.sh:/tmp/minio.sh
|
||||
environment:
|
||||
MINIO_HOST: http://minio:9000
|
||||
MINIO_HOST: http://minio.db.svc.cluster.local:9000
|
||||
MINIO_ACCESS_KEY: ${COMMON_S3_KEY}
|
||||
MINIO_SECRET_KEY: ${COMMON_S3_SECRET}
|
||||
user: root
|
||||
|
|
@ -80,7 +103,7 @@ services:
|
|||
bash /tmp/minio.sh init || exit 100
|
||||
|
||||
db-migration:
|
||||
image: bitnami/postgresql:14.5.0
|
||||
image: bitnami/postgresql:14.5.0
|
||||
container_name: db-migration
|
||||
profiles:
|
||||
- "migration"
|
||||
|
|
@ -101,65 +124,317 @@ services:
|
|||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
until PGPASSWORD=${COMMON_PG_PASSWORD} psql -h postgresql -U postgres -d postgres -c '\q'; do
|
||||
until psql -c '\q'; do
|
||||
echo "PostgreSQL is unavailable - sleeping"
|
||||
sleep 1
|
||||
done
|
||||
echo "PostgreSQL is up - executing command"
|
||||
psql -v ON_ERROR_STOP=1 -f /tmp/init_schema.sql
|
||||
|
||||
frontend-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/frontend:${COMMON_VERSION}
|
||||
container_name: frontend
|
||||
clickhouse-migration:
|
||||
image: clickhouse/clickhouse-server:${CLICKHOUSE_VERSION}
|
||||
container_name: clickhouse-migration
|
||||
profiles:
|
||||
- "migration"
|
||||
depends_on:
|
||||
- clickhouse
|
||||
- minio-migration
|
||||
networks:
|
||||
- openreplay-net
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ../schema/db/init_dbs/clickhouse/create/init_schema.sql:/tmp/init_schema.sql
|
||||
environment:
|
||||
CH_HOST: "clickhouse-openreplay-clickhouse.db.svc.cluster.local"
|
||||
CH_PORT: "9000"
|
||||
CH_PORT_HTTP: "8123"
|
||||
CH_USERNAME: "default"
|
||||
CH_PASSWORD: ""
|
||||
entrypoint:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
# Checking variable is empty. Shell independant method.
|
||||
# Wait for Minio to be ready
|
||||
until nc -z -v -w30 clickhouse-openreplay-clickhouse.db.svc.cluster.local 9000; do
|
||||
echo "Waiting for Minio server to be ready..."
|
||||
sleep 1
|
||||
done
|
||||
|
||||
echo "clickhouse is up - executing command"
|
||||
clickhouse-client -h clickhouse-openreplay-clickhouse.db.svc.cluster.local --user default --port 9000 --multiquery < /tmp/init_schema.sql || true
|
||||
|
||||
alerts-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/alerts:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: alerts
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- alerts-openreplay
|
||||
- alerts-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- alerts.env
|
||||
- docker-envs/alerts.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
|
||||
analytics-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/analytics:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: analytics
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- analytics-openreplay
|
||||
- analytics-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/analytics.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
http-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/http:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: http
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- http-openreplay
|
||||
- http-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/http.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
images-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/images:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: images
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- images-openreplay
|
||||
- images-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/images.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
integrations-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/integrations:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: integrations
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- integrations-openreplay
|
||||
- integrations-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/integrations.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
sink-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/sink:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: sink
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- sink-openreplay
|
||||
- sink-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/sink.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
sourcemapreader-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/sourcemapreader:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: sourcemapreader
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- sourcemapreader-openreplay
|
||||
- sourcemapreader-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/sourcemapreader.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
spot-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/spot:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: spot
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- spot-openreplay
|
||||
- spot-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/spot.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
storage-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/storage:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: storage
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- storage-openreplay
|
||||
- storage-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/storage.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
assets-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/assets:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: assets
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- assets-openreplay
|
||||
- assets-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- assets.env
|
||||
- docker-envs/assets.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
|
||||
assist-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/assist:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: assist
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- assist-openreplay
|
||||
- assist-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- assist.env
|
||||
- docker-envs/assist.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
|
||||
canvases-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/canvases:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: canvases
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- canvases-openreplay
|
||||
- canvases-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/canvases.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
chalice-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/chalice:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: chalice
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- chalice-openreplay
|
||||
- chalice-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/chalice.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
db-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/db:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: db
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- db-openreplay
|
||||
- db-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- db.env
|
||||
- docker-envs/db.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
|
||||
ender-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/ender:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: ender
|
||||
networks:
|
||||
- openreplay-net
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- ender-openreplay
|
||||
- ender-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- ender.env
|
||||
- docker-envs/ender.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
|
||||
frontend-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/frontend:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
container_name: frontend
|
||||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- frontend-openreplay
|
||||
- frontend-openreplay.app.svc.cluster.local
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
env_file:
|
||||
- docker-envs/frontend.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
heuristics-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/heuristics:${COMMON_VERSION}
|
||||
domainname: app.svc.cluster.local
|
||||
|
|
@ -167,88 +442,15 @@ services:
|
|||
networks:
|
||||
openreplay-net:
|
||||
aliases:
|
||||
- heuristics-openreplay
|
||||
- heuristics-openreplay.app.svc.cluster.local
|
||||
env_file:
|
||||
- heuristics.env
|
||||
restart: unless-stopped
|
||||
|
||||
imagestorage-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/imagestorage:${COMMON_VERSION}
|
||||
container_name: imagestorage
|
||||
env_file:
|
||||
- imagestorage.env
|
||||
networks:
|
||||
- openreplay-net
|
||||
restart: unless-stopped
|
||||
|
||||
integrations-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/integrations:${COMMON_VERSION}
|
||||
container_name: integrations
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- integrations.env
|
||||
restart: unless-stopped
|
||||
|
||||
peers-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/peers:${COMMON_VERSION}
|
||||
container_name: peers
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- peers.env
|
||||
restart: unless-stopped
|
||||
|
||||
sourcemapreader-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/sourcemapreader:${COMMON_VERSION}
|
||||
container_name: sourcemapreader
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- sourcemapreader.env
|
||||
restart: unless-stopped
|
||||
|
||||
http-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/http:${COMMON_VERSION}
|
||||
container_name: http
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- http.env
|
||||
restart: unless-stopped
|
||||
|
||||
chalice-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/chalice:${COMMON_VERSION}
|
||||
container_name: chalice
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- chalice.env
|
||||
restart: unless-stopped
|
||||
|
||||
sink-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/sink:${COMMON_VERSION}
|
||||
container_name: sink
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- sink.env
|
||||
restart: unless-stopped
|
||||
|
||||
storage-openreplay:
|
||||
image: public.ecr.aws/p1t3u8a3/storage:${COMMON_VERSION}
|
||||
container_name: storage
|
||||
volumes:
|
||||
- shared-volume:/mnt/efs
|
||||
networks:
|
||||
- openreplay-net
|
||||
env_file:
|
||||
- storage.env
|
||||
- docker-envs/heuristics.env
|
||||
environment: {} # Fallback empty environment if env_file is missing
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
nginx-openreplay:
|
||||
image: nginx:latest
|
||||
|
|
@ -280,6 +482,7 @@ services:
|
|||
|
||||
volumes:
|
||||
pgdata:
|
||||
clickhouse:
|
||||
redisdata:
|
||||
miniodata:
|
||||
shared-volume:
|
||||
|
|
|
|||
27
scripts/docker-compose/docker-envs/alerts.env
Normal file
27
scripts/docker-compose/docker-envs/alerts.env
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
version_number="v1.22.0"
|
||||
pg_host="postgresql.db.svc.cluster.local"
|
||||
pg_port="5432"
|
||||
pg_dbname="postgres"
|
||||
ch_host="clickhouse-openreplay-clickhouse.db.svc.cluster.local"
|
||||
ch_port="9000"
|
||||
ch_port_http="8123"
|
||||
ch_username="default"
|
||||
ch_password=""
|
||||
pg_user="postgres"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
SITE_URL="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
EMAIL_HOST=""
|
||||
EMAIL_PORT="587"
|
||||
EMAIL_USER=""
|
||||
EMAIL_PASSWORD=""
|
||||
EMAIL_USE_TLS="true"
|
||||
EMAIL_USE_SSL="false"
|
||||
EMAIL_SSL_KEY=""
|
||||
EMAIL_SSL_CERT=""
|
||||
EMAIL_FROM="OpenReplay<do-not-reply@openreplay.com>"
|
||||
LOGLEVEL="INFO"
|
||||
PYTHONUNBUFFERED="0"
|
||||
11
scripts/docker-compose/docker-envs/analytics.env
Normal file
11
scripts/docker-compose/docker-envs/analytics.env
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
TOKEN_SECRET="secret_token_string"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
JWT_SECRET="${COMMON_JWT_SECRET}"
|
||||
CH_USERNAME="default"
|
||||
CH_PASSWORD=""
|
||||
CLICKHOUSE_STRING="clickhouse-openreplay-clickhouse.db.svc.cluster.local:9000/"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
10
scripts/docker-compose/docker-envs/assets.env
Normal file
10
scripts/docker-compose/docker-envs/assets.env
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
BUCKET_NAME="sessions-assets"
|
||||
LICENSE_KEY=""
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
AWS_REGION="us-east-1"
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
ASSETS_ORIGIN="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}/sessions-assets"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
11
scripts/docker-compose/docker-envs/assist.env
Normal file
11
scripts/docker-compose/docker-envs/assist.env
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
ASSIST_JWT_SECRET="${COMMON_ASSIST_JWT_SECRET}"
|
||||
ASSIST_KEY="${COMMON_ASSIST_KEY}"
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
S3_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}:80"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
REDIS_URL="redis-master.db.svc.cluster.local"
|
||||
CLEAR_SOCKET_TIME="720"
|
||||
debug="0"
|
||||
redis="false"
|
||||
uws="false"
|
||||
10
scripts/docker-compose/docker-envs/canvases.env
Normal file
10
scripts/docker-compose/docker-envs/canvases.env
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
AWS_REGION="us-east-1"
|
||||
BUCKET_NAME="mobs"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
FS_CLEAN_HRS="24"
|
||||
61
scripts/docker-compose/docker-envs/chalice.env
Normal file
61
scripts/docker-compose/docker-envs/chalice.env
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local"
|
||||
ch_username="default"
|
||||
ch_password=""
|
||||
ch_host="clickhouse-openreplay-clickhouse.db.svc.cluster.local"
|
||||
ch_port="9000"
|
||||
ch_port_http="8123"
|
||||
sourcemaps_reader="http://sourcemapreader-openreplay.app.svc.cluster.local:9000/%s/sourcemaps"
|
||||
ASSIST_URL="http://assist-openreplay.app.svc.cluster.local:9001/assist/%s"
|
||||
ASSIST_JWT_SECRET="${COMMON_ASSIST_JWT_SECRET}"
|
||||
JWT_SECRET="${COMMON_JWT_SECRET}"
|
||||
JWT_SPOT_SECRET="${COMMON_JWT_SPOT_SECRET}"
|
||||
ASSIST_KEY="${COMMON_ASSIST_KEY}"
|
||||
LICENSE_KEY=""
|
||||
version_number="v1.22.0"
|
||||
pg_host="postgresql.db.svc.cluster.local"
|
||||
pg_port="5432"
|
||||
pg_dbname="postgres"
|
||||
pg_user="postgres"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
SITE_URL="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
AWS_DEFAULT_REGION="us-east-1"
|
||||
sessions_region="us-east-1"
|
||||
ASSIST_RECORDS_BUCKET="records"
|
||||
sessions_bucket="mobs"
|
||||
IOS_VIDEO_BUCKET="mobs"
|
||||
sourcemaps_bucket="sourcemaps"
|
||||
js_cache_bucket="sessions-assets"
|
||||
EMAIL_HOST=""
|
||||
EMAIL_PORT="587"
|
||||
EMAIL_USER=""
|
||||
EMAIL_PASSWORD=""
|
||||
EMAIL_USE_TLS="true"
|
||||
EMAIL_USE_SSL="false"
|
||||
EMAIL_SSL_KEY=""
|
||||
EMAIL_SSL_CERT=""
|
||||
EMAIL_FROM="OpenReplay<do-not-reply@openreplay.com>"
|
||||
CH_COMPRESSION="false"
|
||||
CLUSTER_URL="svc.cluster.local"
|
||||
JWT_EXPIRATION="86400"
|
||||
JWT_REFRESH_SECRET="${COMMON_JWT_REFRESH_SECRET}"
|
||||
JWT_SPOT_REFRESH_SECRET="${COMMON_JWT_SPOT_REFRESH_SECRET}"
|
||||
LOGLEVEL="INFO"
|
||||
PYTHONUNBUFFERED="0"
|
||||
SAML2_MD_URL=""
|
||||
announcement_url=""
|
||||
assist_secret=""
|
||||
async_Token=""
|
||||
captcha_key=""
|
||||
captcha_server=""
|
||||
iceServers=""
|
||||
idp_entityId=""
|
||||
idp_name=""
|
||||
idp_sls_url=""
|
||||
idp_sso_url=""
|
||||
idp_tenantKey=""
|
||||
idp_x509cert=""
|
||||
jwt_algorithm="HS512"
|
||||
11
scripts/docker-compose/docker-envs/db.env
Normal file
11
scripts/docker-compose/docker-envs/db.env
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
CH_USERNAME="default"
|
||||
CH_PASSWORD=""
|
||||
CLICKHOUSE_STRING="clickhouse-openreplay-clickhouse.db.svc.cluster.local:9000/default"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
QUICKWIT_ENABLED="false"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
ch_db="default"
|
||||
6
scripts/docker-compose/docker-envs/ender.env
Normal file
6
scripts/docker-compose/docker-envs/ender.env
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
2
scripts/docker-compose/docker-envs/frontend.env
Normal file
2
scripts/docker-compose/docker-envs/frontend.env
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
TRACKER_HOST="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}/script"
|
||||
HTTP_PORT="80"
|
||||
4
scripts/docker-compose/docker-envs/heuristics.env
Normal file
4
scripts/docker-compose/docker-envs/heuristics.env
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
15
scripts/docker-compose/docker-envs/http.env
Normal file
15
scripts/docker-compose/docker-envs/http.env
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
BUCKET_NAME="uxtesting-records"
|
||||
CACHE_ASSETS="true"
|
||||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
AWS_REGION="us-east-1"
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
JWT_SECRET="${COMMON_JWT_SECRET}"
|
||||
JWT_SPOT_SECRET="${COMMON_JWT_SPOT_SECRET}"
|
||||
TOKEN_SECRET="${COMMON_TOKEN_SECRET}"
|
||||
10
scripts/docker-compose/docker-envs/images.env
Normal file
10
scripts/docker-compose/docker-envs/images.env
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
AWS_REGION="us-east-1"
|
||||
BUCKET_NAME="mobs"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
FS_CLEAN_HRS="24"
|
||||
13
scripts/docker-compose/docker-envs/integrations.env
Normal file
13
scripts/docker-compose/docker-envs/integrations.env
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
AWS_REGION="us-east-1"
|
||||
BUCKET_NAME="mobs"
|
||||
JWT_SECRET="${COMMON_JWT_SECRET}"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
TOKEN_SECRET="secret_token_string"
|
||||
5
scripts/docker-compose/docker-envs/sink.env
Normal file
5
scripts/docker-compose/docker-envs/sink.env
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
ASSETS_ORIGIN="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}/sessions-assets"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
11
scripts/docker-compose/docker-envs/sourcemapreader.env
Normal file
11
scripts/docker-compose/docker-envs/sourcemapreader.env
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
SMR_HOST="0.0.0.0"
|
||||
S3_HOST="http://minio.db.svc.cluster.local:9000"
|
||||
S3_KEY="${COMMON_S3_KEY}"
|
||||
S3_SECRET="${COMMON_S3_SECRET}"
|
||||
AWS_REGION="us-east-1"
|
||||
LICENSE_KEY=""
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres"
|
||||
ASSETS_ORIGIN="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}/sessions-assets"
|
||||
16
scripts/docker-compose/docker-envs/spot.env
Normal file
16
scripts/docker-compose/docker-envs/spot.env
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
CACHE_ASSETS="true"
|
||||
FS_CLEAN_HRS="24"
|
||||
TOKEN_SECRET="secret_token_string"
|
||||
AWS_ACCESS_KEY_ID="${COMMON_S3_KEY}"
|
||||
AWS_SECRET_ACCESS_KEY="${COMMON_S3_SECRET}"
|
||||
BUCKET_NAME="spots"
|
||||
AWS_REGION="us-east-1"
|
||||
AWS_ENDPOINT="${COMMON_PROTOCOL}://${COMMON_DOMAIN_NAME}"
|
||||
LICENSE_KEY=""
|
||||
KAFKA_SERVERS="kafka.db.svc.cluster.local:9092"
|
||||
KAFKA_USE_SSL="false"
|
||||
JWT_SECRET="${COMMON_JWT_SECRET}"
|
||||
JWT_SPOT_SECRET="${COMMON_JWT_SPOT_SECRET}"
|
||||
pg_password="${COMMON_PG_PASSWORD}"
|
||||
POSTGRES_STRING="postgres://postgres:${COMMON_PG_PASSWORD}@postgresql.db.svc.cluster.local:5432/postgres?sslmode=disable"
|
||||
REDIS_STRING="redis://redis-master.db.svc.cluster.local:6379"
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue