Compare commits
94 commits
main...snyk-fix-d
| Author | SHA1 | Date |
|---|---|---|
|  | effd4c0a6c |  |
|  | 6a142db59f |  |
|  | 0f26fcf3e4 |  |
|  | f0488edf83 |  |
|  | a5df7ffb90 |  |
|  | e2f120b77f |  |
|  | b39a2dbaa9 |  |
|  | 65c497f902 |  |
|  | ebf5c1f49c |  |
|  | 31f2fe6d48 |  |
|  | fe4a38d6fa |  |
|  | b16fd7ffff |  |
|  | 11406d4dbf |  |
|  | f149ace8f2 |  |
|  | 3002386673 |  |
|  | 0092b2fcb7 |  |
|  | ed281b4f7d |  |
|  | 0ab36aac03 |  |
|  | 52fe465dc8 |  |
|  | 4b3bbe1e8d |  |
|  | 548930c5be |  |
|  | bffd6e51b4 |  |
|  | ad37e94cc7 |  |
|  | 9dbf682efe |  |
|  | ec867328ba |  |
|  | 85fe92e352 |  |
|  | a30cfb8468 |  |
|  | ddc3d1202f |  |
|  | dbbe4cd2e1 |  |
|  | 3249329537 |  |
|  | 82c33dab0c |  |
|  | fa4a8f0c67 |  |
|  | 933626d3ae |  |
|  | ab6921c6d9 |  |
|  | ee87e89805 |  |
|  | 398e50a9b0 |  |
|  | e0a2c9b97d |  |
|  | 4099eea0f6 |  |
|  | e424ccd26b |  |
|  | f74b25f81a |  |
|  | 6bf07df5e6 |  |
|  | 5504964fe4 |  |
|  | ee6b22b579 |  |
|  | 7b0027e3bd |  |
|  | e2bfc23064 |  |
|  | eea362969e |  |
|  | 0d88edb572 |  |
|  | b6976dfec6 |  |
|  | e0ffc4175d |  |
|  | 3663e21c67 |  |
|  | a68e19b239 |  |
|  | 55576d1251 |  |
|  | 8784615509 |  |
|  | 8c6ce9c068 |  |
|  | f9aaa45b0c |  |
|  | fa91609d8a |  |
|  | be717cd01a |  |
|  | cf6e7511a2 |  |
|  | c1c1617766 |  |
|  | 32525385af |  |
|  | c19f258860 |  |
|  | 2c31a239bd |  |
|  | 7f6d0d07c8 |  |
|  | 2152d1c3db |  |
|  | 87c3b59a59 |  |
|  | b51b7dcfad |  |
|  | 452dde1620 |  |
|  | 707939a37f |  |
|  | eb47338c1e |  |
|  | 2192681149 |  |
|  | c8d0d1e949 |  |
|  | 653221cbd8 |  |
|  | 499fff6646 |  |
|  | 0b4c0e092d |  |
|  | dbfbf55b82 |  |
|  | e327522829 |  |
|  | 24f489dcc6 |  |
|  | 4503aeca25 |  |
|  | e97b519598 |  |
|  | 7926279342 |  |
|  | 61c415bffa |  |
|  | 0de0dd4cbf |  |
|  | ad8e35198b |  |
|  | 76ddea74f3 |  |
|  | 1765c0b5bf |  |
|  | 621f63d90e |  |
|  | 89f59b2054 |  |
|  | 1a5d00444e |  |
|  | 4ee57c4e87 |  |
|  | e8c8b861e0 |  |
|  | 7f05a81b0b |  |
|  | b58b446ca6 |  |
|  | 65f843805c |  |
|  | 90059f59ca |  |
134 changed files with 1661 additions and 2500 deletions
17 .github/workflows/alerts-ee.yaml (vendored)

@@ -145,11 +145,14 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/alerts.yaml (vendored)

@@ -133,12 +133,15 @@ jobs:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
18 .github/workflows/api-ee.yaml (vendored)

@@ -144,11 +144,15 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/api.yaml (vendored)

@@ -133,11 +133,12 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging

# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/assist-ee.yaml (vendored)

@@ -116,11 +116,12 @@ jobs:
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
16 .github/workflows/assist-stats.yaml (vendored)

@@ -130,11 +130,13 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/assist.yaml (vendored)

@@ -115,11 +115,12 @@ jobs:
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/crons-ee.yaml (vendored)

@@ -145,11 +145,12 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/db-migrate.yaml (vendored)

@@ -140,12 +140,13 @@ jobs:
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# AWS_REGION: eu-central-1
# AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
2 .github/workflows/frontend-dev.yaml (vendored)

@@ -78,4 +78,4 @@ jobs:
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
17 .github/workflows/frontend.yaml (vendored)

@@ -133,11 +133,12 @@ jobs:
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# AWS_REGION: eu-central-1
# AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
158 .github/workflows/patch-build.yaml (vendored, Normal file)

@@ -0,0 +1,158 @@
# Ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions

on:
workflow_dispatch:
description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
inputs:
services:
description: 'Comma separated names of services to build(in small letters).'
required: true
default: 'chalice,frontend'

name: Build patches from main branch, Raise PR to Main, and Push to tag

jobs:
deploy:
name: Build Patch from main
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Rebase with main branch, to make sure the code has latest main changes
run: |
git pull --rebase origin main

- name: Downloading yq
run: |
VERSION="v4.42.1"
sudo wget https://github.com/mikefarah/yq/releases/download/${VERSION}/yq_linux_amd64 -O /usr/bin/yq
sudo chmod +x /usr/bin/yq

# Configure AWS credentials for the first registry
- name: Configure AWS credentials for RELEASE_ARM_REGISTRY
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_DEPOT_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_DEPOT_SECRET_KEY }}
aws-region: ${{ secrets.AWS_DEPOT_DEFAULT_REGION }}

- name: Login to Amazon ECR for RELEASE_ARM_REGISTRY
id: login-ecr-arm
run: |
aws ecr get-login-password --region ${{ secrets.AWS_DEPOT_DEFAULT_REGION }} | docker login --username AWS --password-stdin ${{ secrets.RELEASE_ARM_REGISTRY }}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${{ secrets.RELEASE_OSS_REGISTRY }}

- uses: depot/setup-action@v1
- name: Get HEAD Commit ID
run: echo "HEAD_COMMIT_ID=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Define Branch Name
run: echo "BRANCH_NAME=patch/main/${HEAD_COMMIT_ID}" >> $GITHUB_ENV
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git

- name: Build
id: build-image
env:
DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
MSAAS_REPO_FOLDER: /tmp/msaas
run: |
set -exo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git checkout -b $BRANCH_NAME
working_dir=$(pwd)
function image_version(){
local service=$1
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$service/Chart.yaml"
current_version=$(yq eval '.AppVersion' $chart_path)
new_version=$(echo $current_version | awk -F. '{$NF += 1 ; print $1"."$2"."$3}')
echo $new_version
# yq eval ".AppVersion = \"$new_version\"" -i $chart_path
}
function clone_msaas() {
[ -d $MSAAS_REPO_FOLDER ] || {
git clone -b dev --recursive https://x-access-token:$MSAAS_REPO_CLONE_TOKEN@$MSAAS_REPO_URL $MSAAS_REPO_FOLDER
cd $MSAAS_REPO_FOLDER
bash git-init.sh
git checkout
}
}
function build_managed() {
local service=$1
local version=$2
echo building managed
clone_msaas
if [[ $service == 'chalice' ]]; then
cd $MSAAS_REPO_FOLDER/openreplay/api
else
cd $MSAAS_REPO_FOLDER/openreplay/$service
fi
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh >> /tmp/arm.txt
}
# Checking for backend images
ls backend/cmd >> /tmp/backend.txt
echo Services: "${{ github.event.inputs.services }}"
IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
# Build FOSS
for SERVICE in "${SERVICES[@]}"; do
# Check if service is backend
if grep -q $SERVICE /tmp/backend.txt; then
cd backend
foss_build_args="nil $SERVICE"
ee_build_args="ee $SERVICE"
else
[[ $SERVICE == 'chalice' ]] && cd $working_dir/api || cd $SERVICE
ee_build_args="ee"
fi
version=$(image_version $SERVICE)
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash build.sh $foss_build_args
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash build.sh $foss_build_args
echo IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash build.sh $ee_build_args
IMAGE_TAG=$version-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$DOCKER_REPO_OSS PUSH_IMAGE=0 bash build.sh $ee_build_args
if [[ "$SERVICE" != "chalice" && "$SERVICE" != "frontend" ]]; then
IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh $foss_build_args
echo IMAGE_TAG=$version DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=arm64 DOCKER_REPO=$DOCKER_REPO_ARM PUSH_IMAGE=0 bash build.sh $foss_build_args
else
build_managed $SERVICE $version
fi
cd $working_dir
chart_path="$working_dir/scripts/helmcharts/openreplay/charts/$SERVICE/Chart.yaml"
yq eval ".AppVersion = \"$version\"" -i $chart_path
git add $chart_path
git commit -m "Increment $SERVICE chart version"
git push --set-upstream origin $BRANCH_NAME
done

- name: Create Pull Request
uses: repo-sync/pull-request@v2
with:
github_token: ${{ secrets.ACTIONS_COMMMIT_TOKEN }}
source_branch: ${{ env.BRANCH_NAME }}
destination_branch: "main"
pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}"
pr_body: |
This PR updates the Helm chart version after building the patch from $HEAD_COMMIT_ID.
Once this PR is merged, To update the latest tag, run the following workflow.
https://github.com/openreplay/openreplay/actions/workflows/update-tag.yaml

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO_ARM: ${{ secrets.RELEASE_ARM_REGISTRY }}
# DOCKER_REPO_OSS: ${{ secrets.RELEASE_OSS_REGISTRY }}
# MSAAS_REPO_CLONE_TOKEN: ${{ secrets.MSAAS_REPO_CLONE_TOKEN }}
# MSAAS_REPO_URL: ${{ secrets.MSAAS_REPO_URL }}
# MSAAS_REPO_FOLDER: /tmp/msaas
# with:
# limit-access-to-actor: true
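As a side note, here is a minimal Python sketch (hypothetical, not part of the workflow above) of the same patch-version bump that the `image_version` helper performs with `yq` and `awk`: read a semver-like `AppVersion` string and increment its last numeric component.

```python
# Hypothetical illustration of the image_version bump above:
# increment the last component of a version string, e.g. "1.16.0" -> "1.16.1".
def bump_patch(app_version: str) -> str:
    parts = app_version.split(".")
    parts[-1] = str(int(parts[-1]) + 1)  # mirrors awk '{$NF += 1}'
    return ".".join(parts)


if __name__ == "__main__":
    print(bump_patch("1.16.0"))  # prints 1.16.1
```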
17 .github/workflows/peers-ee.yaml (vendored)

@@ -133,11 +133,12 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
16 .github/workflows/peers.yaml (vendored)

@@ -130,11 +130,13 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
2 .github/workflows/pr-env-delete.yaml (vendored)

@@ -83,4 +83,4 @@ jobs:
]
}
EOF
aws route53 change-resource-record-sets --hosted-zone-id ${{ secrets.OR_PR_HOSTED_ZONE_ID }} --change-batch file://route53-changes.json
16 .github/workflows/pr-env.yaml (vendored)

@@ -329,10 +329,12 @@
# run: |
# # Add any cleanup commands if necessary

- name: Debug Job
if: failure()
uses: mxschmitt/action-tmate@v3
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
16 .github/workflows/sourcemaps-reader-ee.yaml (vendored)

@@ -132,11 +132,13 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
16 .github/workflows/sourcemaps-reader.yaml (vendored)

@@ -131,11 +131,13 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
2 .github/workflows/tracker-tests.yaml (vendored)

@@ -72,4 +72,4 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: tracker
name: tracker
35 .github/workflows/update-tag.yaml (vendored, Normal file)

@@ -0,0 +1,35 @@
on:
workflow_dispatch:
description: 'This workflow will build for patches for latest tag, and will Always use commit from main branch.'
inputs:
services:
description: 'This action will update the latest tag with current main branch HEAD. Should I proceed ? true/false'
required: true
default: 'false'

name: Force Push tag with main branch HEAD

jobs:
deploy:
name: Build Patch from main
runs-on: ubuntu-latest
env:
DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set Remote with GITHUB_TOKEN
run: |
git config --unset http.https://github.com/.extraheader
git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git
- name: Push main branch to tag
run: |
git fetch --tags
git checkout main
git push origin HEAD:refs/tags/$(git describe --tags `git rev-list --tags --max-count=1`) --force
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# with:
# limit-access-to-actor: true
17 .github/workflows/workers-ee.yaml (vendored)

@@ -169,11 +169,12 @@ jobs:
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'

# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
17 .github/workflows/workers.yaml (vendored)

@@ -166,11 +166,12 @@ jobs:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
#
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
# with:
# limit-access-to-actor: true
3 .gitmodules (vendored)

@@ -1,3 +0,0 @@
[submodule "ee/intelligent_search/llama"]
path = ee/intelligent_search/llama
url = https://github.com/facebookresearch/llama.git
10 README.md

@@ -38,15 +38,11 @@
</a>
</p>

<p align="center">
<a href="https://github.com/openreplay/openreplay">
<img src="static/openreplay-git-hero.svg">
</a>
</p>
https://github.com/openreplay/openreplay/assets/20417222/684133c4-575a-48a7-aa91-d4bf88c5436a

OpenReplay is a session replay suite you can host yourself, that lets you see what users do on your web app, helping you troubleshoot issues faster.
OpenReplay is a session replay suite you can host yourself, that lets you see what users do on your web and mobile apps, helping you troubleshoot issues faster.

- **Session replay.** OpenReplay replays what users do, but not only. It also shows you what went under the hood, how your website or app behaves by capturing network activity, console logs, JS errors, store actions/state, page speed metrics, cpu/memory usage and much more.
- **Session replay**. OpenReplay replays what users do, but not only. It also shows you what went under the hood, how your website or app behaves by capturing network activity, console logs, JS errors, store actions/state, page speed metrics, cpu/memory usage and much more. In addition to web applications, iOS and React Native apps are also supported (Android and Flutter are coming out soon).
- **Low footprint**. With a ~26KB (.br) tracker that asynchronously sends minimal data for a very limited impact on performance.
- **Self-hosted**. No more security compliance checks, 3rd-parties processing user data. Everything OpenReplay captures stays in your cloud for a complete control over your data.
- **Privacy controls**. Fine-grained security features for sanitizing user data.
12 README_AR.md

@@ -38,16 +38,12 @@
</a>
</p>

<p align="center">
<a href="https://github.com/openreplay/openreplay">
<img src="static/openreplay-git-hero.svg">
</a>
</p>
https://github.com/openreplay/openreplay/assets/20417222/684133c4-575a-48a7-aa91-d4bf88c5436a

OpenReplay هو مجموعة إعادة تشغيل الجلسة التي يمكنك استضافتها بنفسك، والتي تتيح لك رؤية ما يقوم به المستخدمون على تطبيق الويب الخاص بك، مما يساعدك على حل المشكلات بشكل أسرع.
OpenReplay هو مجموعة إعادة تشغيل الجلسة التي يمكنك استضافتها بنفسك، والتي تتيح لك رؤية ما يقوم به المستخدمون على تطبيق الويب و تطبيقات الهاتف المحمول الخاص بك، مما يساعدك على حل المشكلات بشكل أسرع.

- **إعادة تشغيل الجلسة.** يقوم OpenReplay بإعادة تشغيل ما يقوم به المستخدمون، وكيف يتصرف موقع الويب الخاص بك أو التطبيق من خلال التقاط النشاط على الشبكة، وسجلات وحدة التحكم، وأخطاء JavaScript، وإجراءات/حالة التخزين، وقياسات سرعة الصفحة، واستخدام وحدة المعالجة المركزية/الذاكرة، وأكثر من ذلك بكثير.
- **بصمة منخفضة**. مع متتبع بحجم حوالي 26 كيلوبايت (نوع .br) الذي يرسل بيانات دقيقة بشكل غير متزامن لتأثير محدود جدًا على الأداء.
- **إعادة تشغيل الجلسة**. يقوم OpenReplay بإعادة تشغيل ما يقوم به المستخدمون، وكيف يتصرف موقع الويب الخاص بك أو التطبيق من خلال التقاط النشاط على الشبكة، وسجلات وحدة التحكم، وأخطاء JavaScript، وإجراءات/حالة التخزين، وقياسات سرعة الصفحة، واستخدام وحدة المعالجة المركزية/الذاكرة، وأكثر من ذلك بكثير. بالإضافة إلى تطبيقات الويب، تطبيقات نظام iOS و React Native مدعومة أيضاً (سيتم إطلاق نسخ Android و Flutter قريباً).
- **بصمة منخفضة**. مع متتبع بحجم حوالي 26 كيلوبايت (نوع .br) الذي يرسل بيانات دقيقة بشكل غير متزامن لتأثير محدود جدًا على الأداء.
- **مضيف بواسطتك.** لا مزيد من فحوص الامتثال الأمني، ومعالجة بيانات المستخدمين من قبل جهات خارجية. كل ما يتم التقاطه بواسطة OpenReplay يبقى في سحابتك للتحكم الكامل في بياناتك.
- **ضوابط الخصوصية.** ميزات أمان دقيقة لتنقية بيانات المستخدم.
- **نشر سهل.** بدعم من مزودي الخدمة السحابية العامة الرئيسيين (AWS، GCP، Azure، DigitalOcean).
@@ -38,15 +38,11 @@
</a>
</p>

<p align="center">
<a href="https://github.com/openreplay/openreplay">
<img src="static/openreplay-git-hero.svg">
</a>
</p>
https://github.com/openreplay/openreplay/assets/20417222/684133c4-575a-48a7-aa91-d4bf88c5436a

OpenReplay es una suite de retransmisión de sesiones que puedes alojar tú mismo, lo que te permite ver lo que hacen los usuarios en tu aplicación web y ayudarte a solucionar problemas más rápido.
OpenReplay es una suite de retransmisión de sesiones que puedes alojar tú mismo, lo que te permite ver lo que hacen los usuarios en tu aplicación web y móviles y ayudarte a solucionar problemas más rápido.

- **Reproducción de sesiones.** OpenReplay reproduce lo que hacen los usuarios, pero no solo eso. También te muestra lo que ocurre bajo el capó, cómo se comporta tu sitio web o aplicación al capturar la actividad de la red, registros de la consola, errores de JavaScript, acciones/estado del almacén, métricas de velocidad de la página, uso de CPU/memoria y mucho más.
- **Reproducción de sesiones**. OpenReplay reproduce lo que hacen los usuarios, pero no solo eso. También te muestra lo que ocurre bajo el capó, cómo se comporta tu sitio web o aplicación al capturar la actividad de la red, registros de la consola, errores de JavaScript, acciones/estado del almacén, métricas de velocidad de la página, uso de CPU/memoria y mucho más. Además de las aplicaciones web, las aplicaciones de iOS y React Native también son compatibles (las versiones de Android y Flutter saldrán pronto).
- **Huella reducida.** Con un rastreador de aproximadamente 26 KB (.br) que envía datos mínimos de forma asíncrona, lo que tiene un impacto muy limitado en el rendimiento.
- **Auto-alojado.** No más verificaciones de cumplimiento de seguridad, procesamiento de datos de usuario por terceros. Todo lo que OpenReplay captura se queda en tu nube para un control completo sobre tus datos.
- **Controles de privacidad.** Funciones de seguridad detalladas para desinfectar los datos de usuario.

@@ -57,6 +53,7 @@ OpenReplay es una suite de retransmisión de sesiones que puedes alojar tú mism
- **Reproducción de sesiones:** Te permite revivir la experiencia de tus usuarios, ver dónde encuentran dificultades y cómo afecta su comportamiento. Cada reproducción de sesión se analiza automáticamente en función de heurísticas, para un triaje sencillo.
- **Herramientas de desarrollo (DevTools):** Es como depurar en tu propio navegador. OpenReplay te proporciona el contexto completo (actividad de red, errores de JavaScript, acciones/estado del almacén y más de 40 métricas) para que puedas reproducir instantáneamente errores y entender problemas de rendimiento.
- **Asistencia (Assist):** Te ayuda a brindar soporte a tus usuarios al ver su pantalla en tiempo real y unirte instantáneamente a una llamada (WebRTC) con ellos, sin necesidad de software de uso compartido de pantalla de terceros.
- **Banderas de características:** Habilitar o deshabilitar una característica, hacer lanzamientos graduales y pruebas A/B sin necesidad de volver a desplegar tu aplicación.
- **Búsqueda universal (Omni-search):** Busca y filtra por casi cualquier acción/criterio de usuario, atributo de sesión o evento técnico, para que puedas responder a cualquier pregunta. No se requiere instrumentación.
- **Embudos (Funnels):** Para resaltar los problemas más impactantes que causan la conversión y la pérdida de ingresos.
- **Controles de privacidad detallados:** Elige qué capturar, qué ocultar o qué ignorar para que los datos de usuario ni siquiera lleguen a tus servidores.
11 README_FR.md

@@ -38,15 +38,11 @@
</a>
</p>

<p align="center">
<a href="https://github.com/openreplay/openreplay">
<img src="static/openreplay-git-hero.svg">
</a>
</p>
https://github.com/openreplay/openreplay/assets/20417222/684133c4-575a-48a7-aa91-d4bf88c5436a

OpenReplay est une suite d'outils de relecture (appelée aussi "replay") de sessions que vous pouvez héberger vous-même, vous permettant de voir ce que les utilisateurs font sur une application web, vous aidant ainsi à résoudre différents types de problèmes plus rapidement.
OpenReplay est une suite d'outils de relecture (appelée aussi "replay") de sessions que vous pouvez héberger vous-même, vous permettant de voir ce que les utilisateurs font sur une application web ou mobile, vous aidant ainsi à résoudre différents types de problèmes plus rapidement.

- **Relecture de session.** OpenReplay rejoue ce que les utilisateurs font, mais pas seulement. Il vous montre également ce qui se passe en coulisse, comment votre site web ou votre application se comporte en capturant l'activité réseau, les journaux de console, les erreurs JS, les actions/états du store, les métriques de chargement des pages, l'utilisation du CPU/mémoire, et bien plus encore.
- **Relecture de session**. OpenReplay rejoue ce que les utilisateurs font, mais pas seulement. Il vous montre également ce qui se passe en coulisse, comment votre site web ou votre application se comporte en capturant l'activité réseau, les journaux de console, les erreurs JS, les actions/états du store, les métriques de chargement des pages, l'utilisation du CPU/mémoire, et bien plus encore. En plus des applications web, les applications iOS et React Native sont également prises en charge (les versions Android et Flutter seront bientôt disponibles).
- **Faible empreinte**. Avec un traqueur d'environ 26 Ko (.br) qui envoie de manière asynchrone des données minimales, ce qui a un impact très limité sur les performances.
- **Auto-hébergé**. Plus de vérifications de conformité en matière de sécurité, plus de traitement des données des utilisateurs par des tiers. Tout ce qu'OpenReplay capture reste dans votre cloud pour un contrôle complet sur vos données.
- **Contrôles de confidentialité**. Fonctionnalités de sécurité détaillées pour la désinfection des données utilisateur.

@@ -57,6 +53,7 @@ OpenReplay est une suite d'outils de relecture (appelée aussi "replay") de sess
- **Relecture de session :** Vous permet de revivre l'expérience de vos utilisateurs, de voir où ils rencontrent des problèmes et comment cela affecte leur comportement. Chaque relecture de session est automatiquement analysée en se basant sur des heuristiques, pour un triage plus facile des problèmes en fonction de l'impact.
- **Outils de développement (DevTools) :** C'est comme déboguer dans votre propre navigateur. OpenReplay vous fournit le contexte complet (activité réseau, erreurs JS, actions/états du store et plus de 40 métriques) pour que vous puissiez instantanément reproduire les bugs et comprendre les problèmes de performance.
- **Assistance (Assist) :** Vous aide à soutenir vos utilisateurs en voyant leur écran en direct et en vous connectant instantanément avec eux via appel/vidéo (WebRTC), sans nécessiter de logiciel tiers de partage d'écran.
- **Drapeaux de fonctionnalité :** Activer ou désactiver une fonctionnalité, faire des déploiements progressifs et des tests A/B sans avoir à redéployer votre application.
- **Recherche universelle (Omni-search) :** Recherchez et filtrez presque n'importe quelle action/critère utilisateur, attribut de session ou événement technique, afin de pouvoir répondre à n'importe quelle question. Aucune instrumentation requise.
- **Entonnoirs (Funnels) :** Pour mettre en évidence les problèmes les plus impactants entraînant une conversion et une perte de revenus.
- **Contrôles de confidentialité détaillés :** Choisissez ce que vous voulez capturer, ce que vous voulez obscurcir ou ignorer, de sorte que les données utilisateur n'atteignent même pas vos serveurs.
11 README_RU.md

@@ -38,15 +38,11 @@
</a>
</p>

<p align="center">
<a href="https://github.com/openreplay/openreplay">
<img src="static/openreplay-git-hero.svg">
</a>
</p>
https://github.com/openreplay/openreplay/assets/20417222/684133c4-575a-48a7-aa91-d4bf88c5436a

OpenReplay - это набор инструментов для воспроизведения пользовательских сессий, позволяющий увидеть действия пользователи в вашем веб-приложении, который вы можете разместить в своем облаке или на серверах.
OpenReplay - это набор инструментов для воспроизведения сессий, который вы можете разместить самостоятельно, позволяющий вам видеть, что пользователи делают в ваших веб- и мобильных приложениях, помогая вам быстрее устранять проблемы.

- **Воспроизведение сессий.** OpenReplay не только воспроизводит действия пользователей, но и показывает, что происходит под капотом сессии, как ведет себя ваш сайт или приложение, фиксируя сетевую активность, логи консоли, JS-ошибки, действия/состояние стейт менеджеров, показатели скорости страницы, использование процессора/памяти и многое другое.
- **Воспроизведение сессий**. OpenReplay не только воспроизводит действия пользователей, но и показывает, что происходит под капотом сессии, как ведет себя ваш сайт или приложение, фиксируя сетевую активность, логи консоли, JS-ошибки, действия/состояние стейт менеджеров, показатели скорости страницы, использование процессора/памяти и многое другое. В дополнение к веб-приложениям, также поддерживаются приложения для iOS и React Native (приложения для Android и Flutter скоро появятся).
- **Компактность**. Размером всего в ~26 КБ (.br), трекер асинхронно отправляет минимальное количество данных, оказывая очень незначительное влияние на производительность вашего приложения.
- **Self-hosted**. Больше никаких проверок на соответствие требованиям безопасности или обработки данных ваших пользователей третьими сторонами. Все, что фиксирует OpenReplay, остается в вашем облаке, что обеспечивает полный контроль над вашими данными.
- **Контроль над приватностью**. Тонкие настройки приватности позволяют записывать только действительно необходимые данные.

@@ -57,6 +53,7 @@ OpenReplay - это набор инструментов для воспроиз
- **Session Replay:** Позволяет повторить опыт пользователей, увидеть, где они испытывают трудности и как это влияет на конверсию. Каждый реплей автоматически анализируется на наличие ошибок и аномалий, что значительно облегчает сортировку и поиск проблемных сессий.
- **DevTools:** Прямо как отладка в вашем собственном браузере. OpenReplay предоставляет вам полный контекст (сетевая активность, JS ошибки, действия/состояние стейт менеджеров и более 40 метрик), чтобы вы могли мгновенно воспроизвести ошибки и найти проблемы с производительностью.
- **Assist:** Позволяет вам помочь вашим пользователям, наблюдая их экран в настоящем времени и мгновенно переходя на звонок (WebRTC) с ними, не требуя стороннего программного обеспечения для совместного просмотра экрана.
- **Функциональные флаги:** Включение или отключение функции, поэтапный выпуск и A/B тестирование без необходимости повторного развертывания вашего приложения.
- **Omni-search:** Поиск и фильтрация практически любого действия пользователя/критерия, атрибута сессии или технического события, чтобы вы могли ответить на любой вопрос.
- **Воронки:** Для выявления наиболее влияющих на конверсию мест.
- **Тонкая настройка приватности:** Выбирайте, что записывать, а что игнорировать, чтобы данные пользователя даже не отправлялись на ваши сервера.
@@ -1,4 +1,4 @@
FROM python:3.11-alpine
FROM python:3.12.3-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ARG GIT_SHA
@@ -56,7 +56,9 @@ async def lifespan(app: FastAPI):
"application_name": "AIO" + config("APP_NAME", default="PY"),
}

database = psycopg_pool.AsyncConnectionPool(kwargs=database, connection_class=ORPYAsyncConnection)
database = psycopg_pool.AsyncConnectionPool(kwargs=database, connection_class=ORPYAsyncConnection,
min_size=config("PG_AIO_MINCONN", cast=int, default=1),
max_size=config("PG_AIO_MAXCONN", cast=int, default=5), )
app.state.postgresql = database

# App listening
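For context, a standalone sketch of the change above (bounding the async psycopg connection pool), assuming psycopg_pool 3.x: the PG_AIO_MINCONN/PG_AIO_MAXCONN environment variable names mirror the hunk, while the DSN and the rest of the setup are illustrative assumptions only.

```python
# Hedged sketch: open an async psycopg pool with explicit min/max sizes read
# from the environment, as in the diff above. The DSN is an assumption.
import asyncio
import os

import psycopg_pool


async def main() -> None:
    pool = psycopg_pool.AsyncConnectionPool(
        conninfo="postgresql://localhost/openreplay",   # assumed DSN
        min_size=int(os.getenv("PG_AIO_MINCONN", "1")),
        max_size=int(os.getenv("PG_AIO_MAXCONN", "5")),
        open=False,                                     # open explicitly below
    )
    await pool.open()
    async with pool.connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("SELECT 1")
            print(await cur.fetchone())
    await pool.close()


if __name__ == "__main__":
    asyncio.run(main())
```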
@@ -15,6 +15,9 @@ exit_err() {
fi
}

source ../scripts/lib/_docker.sh
ARCH=${ARCH:-'amd64'}

environment=$1
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-git_sha}

@@ -66,7 +69,7 @@ function build_api() {
tag="ee-"
}
mv Dockerfile.dockerignore .dockerignore
docker build -f ./Dockerfile --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/${IMAGE_NAME:-'chalice'}:${image_tag} .
docker build -f ./Dockerfile --platform linux/${ARCH} --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/${IMAGE_NAME:-'chalice'}:${image_tag} .
cd ../api || exit_err 100
rm -rf ../${destination}
[[ $PUSH_IMAGE -eq 1 ]] && {
@@ -10,6 +10,7 @@
git_sha=$(git rev-parse --short HEAD)
image_tag=${IMAGE_TAG:-git_sha}
envarg="default-foss"
source ../scripts/lib/_docker.sh
check_prereq() {
which docker || {
echo "Docker not installed, please install docker."

@@ -17,27 +18,26 @@ check_prereq() {
}
}

[[ $1 == ee ]] && ee=true
[[ $PATCH -eq 1 ]] && {
image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
[[ $ee == "true" ]] && {
image_tag="${image_tag}-ee"
}
}
update_helm_release() {
chart=$1
HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
# Update the chart version
sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
# Update image tags
sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
# Commit the changes
git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
git commit -m "chore(helm): Updating $chart image release"
}

function build_alerts(){
function build_alerts() {
destination="_alerts"
[[ $1 == "ee" ]] && {
destination="_alerts_ee"

@@ -69,5 +69,5 @@ function build_alerts(){
check_prereq
build_alerts $1
if [[ $PATCH -eq 1 ]]; then
update_helm_release alerts
fi
@@ -9,6 +9,7 @@

git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
envarg="default-foss"
source ../scripts/lib/_docker.sh
check_prereq() {
which docker || {
echo "Docker not installed, please install docker."

@@ -17,7 +18,7 @@ check_prereq() {
[[ exit -eq 1 ]] && exit 1
}

function build_crons(){
function build_crons() {
destination="_crons_ee"
cp -R ../api ../${destination}
cd ../${destination}

@@ -46,7 +47,6 @@ check_prereq
[[ $1 == "ee" ]] && {
build_crons $1
} || {
echo -e "Crons is only for ee. Rerun the script using \n bash $0 ee"
exit 100
}
@@ -193,10 +193,10 @@ def __merge_metric_with_data(metric: schemas.CardSchema,
if data.series is not None and len(data.series) > 0:
metric.series = data.series

if len(data.filters) > 0:
for s in metric.series:
s.filter.filters += data.filters
metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
# if len(data.filters) > 0:
# for s in metric.series:
# s.filter.filters += data.filters
# metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
return metric
@@ -257,11 +257,11 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):

def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
if len(data.series) == 0:
return {"data": []}
return []
data.series[0].filter.startTimestamp = data.startTimestamp
data.series[0].filter.endTimestamp = data.endTimestamp
data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
return {"data": data}
return data


def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
@@ -282,14 +282,31 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
extra_event = None
extra_conditions = None
if metric_of == schemas.MetricOfTable.visited_url:
extra_event = "events.pages"
extra_conditions = {}
for e in data.events:
if e.type == schemas.EventType.location:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type,
"isEvent": True,
"value": [],
"operator": e.operator,
"filters": []
})
for v in e.value:
if v not in extra_conditions[e.operator].value:
extra_conditions[e.operator].value.append(v)
extra_conditions = list(extra_conditions.values())

elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
user_id=None, extra_event=extra_event, extra_conditions=extra_conditions)
full_args["step_size"] = step_size
with pg_client.PostgresClient() as cur:
if isinstance(metric_of, schemas.MetricOfTable):

@@ -400,7 +417,7 @@ def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):

# this function generates the query and return the generated-query with the dict of query arguments
def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
project_id, user_id, platform="web", extra_event=None):
project_id, user_id, platform="web", extra_event=None, extra_conditions=None):
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
"projectId": project_id, "userId": user_id}

@@ -1085,6 +1102,24 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
extra_join += f"""INNER JOIN {extra_event} AS ev USING(session_id)"""
extra_constraints.append("ev.timestamp>=%(startDate)s")
extra_constraints.append("ev.timestamp<=%(endDate)s")
if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = []
for i, c in enumerate(extra_conditions):
if sh.isAny_opreator(c.operator):
continue
e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
_extra_or_condition.append(
sh.multi_conditions(f"ev.{events.EventType.LOCATION.column} {op} %({e_k})s",
c.value, value_key=e_k))
else:
logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
query_part = f"""\
FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"}
{extra_join}
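A small self-contained sketch of the grouping idea introduced above, with plain dicts standing in for the project's pydantic schemas (an assumption made for brevity): location events that share an operator are collapsed into a single condition whose value list is de-duplicated.

```python
# Hedged sketch of the extra_conditions grouping above (simplified types).
from typing import Dict, List


def group_location_events(events: List[dict]) -> List[dict]:
    grouped: Dict[str, dict] = {}
    for e in events:
        if e["type"] != "location":
            continue
        cond = grouped.setdefault(
            e["operator"],
            {"type": "location", "operator": e["operator"], "value": []},
        )
        for v in e["value"]:
            if v not in cond["value"]:  # keep each URL value once per operator
                cond["value"].append(v)
    return list(grouped.values())


print(group_location_events([
    {"type": "location", "operator": "contains", "value": ["/checkout"]},
    {"type": "location", "operator": "contains", "value": ["/cart", "/checkout"]},
]))
# [{'type': 'location', 'operator': 'contains', 'value': ['/checkout', '/cart']}]
```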
@@ -125,16 +125,15 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
return {"errors": ["Note not found"]}


def delete(tenant_id, user_id, project_id, note_id):
def delete(project_id, note_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(""" UPDATE public.sessions_notes
SET deleted_at = timezone('utc'::text, now())
WHERE note_id = %(note_id)s
AND project_id = %(project_id)s
AND user_id = %(user_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id})
{"project_id": project_id, "note_id": note_id})
)
return {"data": {"state": "success"}}
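For reference, a hedged sketch of the same soft-delete pattern (the note is marked with deleted_at rather than physically removed), written against plain psycopg 3 with an assumed DSN; table and column names follow the hunk above.

```python
# Hedged sketch of the soft delete above: scope by project_id and note_id only,
# since the user_id check was dropped in this change. The DSN is an assumption.
import psycopg


def soft_delete_note(project_id: int, note_id: int) -> None:
    with psycopg.connect("postgresql://localhost/openreplay") as conn:
        conn.execute(
            """UPDATE public.sessions_notes
               SET deleted_at = timezone('utc'::text, now())
               WHERE note_id = %(note_id)s
                 AND project_id = %(project_id)s
                 AND deleted_at IS NULL;""",
            {"project_id": project_id, "note_id": note_id},
        )
```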
@ -1,10 +1,7 @@
|
|||
__author__ = "AZNAUROV David"
|
||||
__maintainer__ = "KRAIEM Taha Yassine"
|
||||
|
||||
import logging
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata, sessions
|
||||
from chalicelib.core import events, metadata
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
"""
|
||||
|
|
@ -57,30 +54,27 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
|
|||
op = sh.get_sql_operator(f.operator)
|
||||
|
||||
filter_type = f.type
|
||||
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
|
||||
f_k = f"f_value{i}"
|
||||
values = {**values,
|
||||
**sh.multi_values(helper.values_for_operator(value=f.value, op=f.operator),
|
||||
value_key=f_k)}
|
||||
**sh.multi_values(f.value, value_key=f_k)}
|
||||
is_not = False
|
||||
if sh.is_negation_operator(f.operator):
|
||||
is_not = True
|
||||
if filter_type == schemas.FilterType.user_browser:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, value_key=f_k))
|
||||
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, value_key=f_k))
|
||||
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, value_key=f_k))
|
||||
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, value_key=f_k))
|
||||
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.duration:
|
||||
if len(f.value) > 0 and f.value[0] is not None:
|
||||
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
|
||||
|
|
@@ -91,35 +85,30 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
             elif filter_type == schemas.FilterType.referrer:
                 # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
                 filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
                 # op = sessions.__get_sql_operator_multiple(f["operator"])
                 first_stage_extra_constraints.append(
-                    sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
+                    sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
             elif filter_type == events.EventType.METADATA.ui_type:
                 if meta_keys is None:
                     meta_keys = metadata.get(project_id=project_id)
                     meta_keys = {m["key"]: m["index"] for m in meta_keys}
                 # op = sessions.__get_sql_operator(f["operator"])
                 if f.source in meta_keys.keys():
                     first_stage_extra_constraints.append(
                         sh.multi_conditions(
                             f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
-                            value_key=f_k))
+                            is_not=is_not, value_key=f_k))
                 # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
             elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
                 # op = sessions.__get_sql_operator(f["operator"])
                 first_stage_extra_constraints.append(
-                    sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
+                    sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
                 # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
             elif filter_type in [schemas.FilterType.user_anonymous_id,
                                  schemas.FilterType.user_anonymous_id_ios]:
                 # op = sessions.__get_sql_operator(f["operator"])
                 first_stage_extra_constraints.append(
-                    sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
+                    sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
                 # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
             elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
                 # op = sessions.__get_sql_operator(f["operator"])
                 first_stage_extra_constraints.append(
-                    sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
+                    sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
                 # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
     i = -1
     for s in stages:
@@ -553,35 +542,11 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
 def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
     output = []
     stages = filter_d.events
     # TODO: handle 1 stage alone

     if len(stages) == 0:
         logging.debug("no stages found")
         return output, 0
     elif len(stages) == 1:
         # TODO: count sessions, and users for single stage
         output = [{
             "type": stages[0].type,
             "value": stages[0].value,
             "dropPercentage": None,
             "operator": stages[0].operator,
             "sessionsCount": 0,
             "dropPct": 0,
             "usersCount": 0,
             "dropDueToIssues": 0
         }]
         # original
         # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
         #                                   project_id=project_id, user_id=None, count_only=True)
         # first change
         # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
         #                                   project_id=project_id, user_id=None, count_only=True)
         # last change
         counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
                                           project_id=project_id, user_id=None, count_only=True)
         output[0]["sessionsCount"] = counts["countSessions"]
         output[0]["usersCount"] = counts["countUsers"]
         return output, 0
     # The result of the multi-stage query
     rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
     if len(rows) == 0:
@@ -249,7 +249,8 @@ def get_issue_title(issue_type):
             'custom': "Custom Event",
             'js_exception': "Error",
             'custom_event_error': "Custom Error",
-            'js_error': "Error"}.get(issue_type, issue_type)
+            'js_error': "Error",
+            "mouse_thrashing": "Mouse Thrashing"}.get(issue_type, issue_type)


 def __progress(old_val, new_val):
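A minimal, self-contained sketch (not the repository file, only the tail of the mapping shown in the hunk above) of how the extended title lookup resolves the newly added issue type and falls back to the raw type for unknown keys:

def get_issue_title(issue_type: str) -> str:
    # Partial mapping mirroring the hunk above; unknown types fall through unchanged.
    return {'custom': "Custom Event",
            'js_exception': "Error",
            'custom_event_error': "Custom Error",
            'js_error': "Error",
            "mouse_thrashing": "Mouse Thrashing"}.get(issue_type, issue_type)

assert get_issue_title("mouse_thrashing") == "Mouse Thrashing"
assert get_issue_title("unknown_type") == "unknown_type"  # fallback keeps the raw type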
@@ -29,7 +29,7 @@ js_cache_bucket=
 jwt_algorithm=HS512
 JWT_EXPIRATION=6000
 JWT_ISSUER=openReplay-dev
-JWT_REFRESH_EXPIRATION=60
+JWT_REFRESH_EXPIRATION=604800
 JWT_REFRESH_SECRET=SECRET2
 jwt_secret=SECRET
 LOCAL_DEV=true
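Assuming the refresh expiration is expressed in seconds (the env file does not state the unit), the new value corresponds to one week:

# Assumption: JWT_REFRESH_EXPIRATION is in seconds.
assert 7 * 24 * 60 * 60 == 604800  # new refresh-token lifetime equals 7 days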
@@ -481,8 +481,7 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema

 @app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
 def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
-                                 note_id=noteId)
+    data = sessions_notes.delete(project_id=projectId, note_id=noteId)
     return data
@@ -658,6 +658,18 @@ class SessionSearchFilterSchema(BaseModel):
     _transform = model_validator(mode='before')(transform_old_filter_type)
     _single_to_list_values = field_validator('value', mode='before')(single_to_list)

+    @model_validator(mode='before')
+    def _transform_data(cls, values):
+        if values.get("source") is not None:
+            if isinstance(values["source"], list):
+                if len(values["source"]) == 0:
+                    values["source"] = None
+                elif len(values["source"]) == 1:
+                    values["source"] = values["source"][0]
+                else:
+                    raise ValueError(f"Unsupported multi-values source")
+        return values
+
     @model_validator(mode='after')
     def filter_validator(cls, values):
         if values.type == FilterType.metadata:
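A standalone sketch (hypothetical class name, plain pydantic v2) of what the new before-validator does to `source`: an empty list becomes None, a single-element list is unwrapped, and anything longer is rejected.

from pydantic import BaseModel, model_validator

class SourceNormalizingSchema(BaseModel):  # hypothetical stand-in for SessionSearchFilterSchema
    source: str | None = None

    @model_validator(mode='before')
    @classmethod
    def _normalize_source(cls, values):
        source = values.get("source")
        if isinstance(source, list):
            if len(source) == 0:
                values["source"] = None        # empty list -> no source
            elif len(source) == 1:
                values["source"] = source[0]   # single value -> unwrap
            else:
                raise ValueError("Unsupported multi-values source")
        return values

print(SourceNormalizingSchema(source=["metadata_1"]).source)  # "metadata_1"
print(SourceNormalizingSchema(source=[]).source)              # None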
@@ -772,9 +784,12 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):

     @field_validator("filters", mode="after")
     def merge_identical_filters(cls, values):
+        # ignore 'issue' type as it could be used for step-filters and tab-filters at the same time
         i = 0
         while i < len(values):
-            if values[i].is_event:
+            if values[i].is_event or values[i].type == FilterType.issue:
+                if values[i].type == FilterType.issue:
+                    values[i] = remove_duplicate_values(values[i])
                 i += 1
                 continue
             j = i + 1
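A small illustrative sketch (hypothetical helper, not the repository's `remove_duplicate_values`) of what de-duplicating a filter's value list amounts to while preserving order:

def dedupe_values(values: list) -> list:
    # Drop repeated values, keep the first occurrence's position.
    seen, out = set(), []
    for v in values:
        if v not in seen:
            seen.add(v)
            out.append(v)
    return out

print(dedupe_values(["click_rage", "dead_click", "click_rage"]))  # ['click_rage', 'dead_click']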
@@ -9,19 +9,20 @@

 # Helper function
 exit_err() {
-  err_code=$1
-  if [[ $err_code != 0 ]]; then
-    exit "$err_code"
-  fi
+    err_code=$1
+    if [[ $err_code != 0 ]]; then
+        exit "$err_code"
+    fi
 }

 source ../scripts/lib/_docker.sh

-app="assist-stats" # Set the app variable to "chalice"
+app="assist-stats"    # Set the app variable to "chalice"
 environment=$1
 git_sha=$(git rev-parse --short HEAD)
 image_tag=${IMAGE_TAG:-git_sha}
 envarg="default-foss"
-chart="$app" # Use the app variable here
+chart="$app"          # Use the app variable here
 check_prereq() {
     which docker || {
         echo "Docker not installed, please install docker."
@@ -32,31 +33,31 @@ check_prereq() {

 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
-  image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  [[ $ee == "true" ]] && {
-    image_tag="${image_tag}-ee"
-  }
+    image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    [[ $ee == "true" ]] && {
+        image_tag="${image_tag}-ee"
+    }
 }
 update_helm_release() {
-  [[ $ee == "true" ]] && return
-  HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  # Update the chart version
-  sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Update image tags
-  sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Commit the changes
-  git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  git commit -m "chore(helm): Updating $chart image release"
+    [[ $ee == "true" ]] && return
+    HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    # Update the chart version
+    sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Update image tags
+    sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Commit the changes
+    git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    git commit -m "chore(helm): Updating $chart image release"
 }

-function build_api(){
+function build_api() {
     destination="_assist_stats"
     [[ $1 == "ee" ]] && {
         destination="_assist_stats_ee"
     }
     [[ -d ../${destination} ]] && {
-      echo "Removing previous build cache"
-      rm -rf ../${destination}
+        echo "Removing previous build cache"
+        rm -rf ../${destination}
     }
     cp -R ../assist-stats ../${destination}
     cd ../${destination} || exit_err 100
@@ -86,5 +87,5 @@ check_prereq
 build_api $environment
 echo buil_complete
 if [[ $PATCH -eq 1 ]]; then
-  update_helm_release
+    update_helm_release
 fi
@@ -14,34 +14,35 @@ check_prereq() {
         exit 1
     }
 }
 source ../scripts/lib/_docker.sh

 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
-  image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  [[ $ee == "true" ]] && {
-    image_tag="${image_tag}-ee"
-  }
+    image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    [[ $ee == "true" ]] && {
+        image_tag="${image_tag}-ee"
+    }
 }
 update_helm_release() {
-  chart=$1
-  HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  # Update the chart version
-  sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Update image tags
-  sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Commit the changes
-  git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  git commit -m "chore(helm): Updating $chart image release"
+    chart=$1
+    HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    # Update the chart version
+    sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Update image tags
+    sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Commit the changes
+    git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    git commit -m "chore(helm): Updating $chart image release"
 }

-function build_api(){
+function build_api() {
     destination="_assist"
     [[ $1 == "ee" ]] && {
         destination="_assist_ee"
     }
     [[ -d ../${destination} ]] && {
-      echo "Removing previous build cache"
-      rm -rf ../${destination}
+        echo "Removing previous build cache"
+        rm -rf ../${destination}
     }
     cp -R ../assist ../${destination}
     cd ../${destination}
@@ -68,5 +69,5 @@ function build_api(){
 check_prereq
 build_api $1
 if [[ $PATCH -eq 1 ]]; then
-  update_helm_release assist
+    update_helm_release assist
 fi
@@ -40,4 +40,4 @@ module.exports = {
     socketsLiveByProject,
     socketsLiveBySession
-}
+};
 };

@@ -20,7 +20,7 @@ const extractTabId = (peerId) => {
 const extractPeerId = (peerId) => {
     let splited = peerId.split("-");
     if (splited.length < 2 || splited.length > 3) {
-        debug && console.error(`cannot split peerId: ${peerId}`);
+        // debug && console.error(`cannot split peerId: ${peerId}`);
        return {};
     }
     if (PROJECT_KEY_LENGTH > 0 && splited[0].length !== PROJECT_KEY_LENGTH) {
@@ -15,16 +15,13 @@ const {
     RecordRequestDuration,
     IncreaseTotalRequests
 } = require('../utils/metrics');
-const {
-    GetRoomInfo,
-    GetRooms,
-    GetSessions,
-} = require('../utils/rooms');
+const {fetchSockets} = require("./wsServer");
+const {IDENTITIES} = require("./assistHelper");

 const debug_log = process.env.debug === "1";

 const respond = function (req, res, data) {
-    console.log("responding with data: ", data)
+    console.log("responding with data: ", JSON.stringify(data))
     let result = {data}
     if (process.env.uws !== "true") {
         res.statusCode = 200;
@@ -38,8 +35,18 @@ const respond = function (req, res, data) {
     RecordRequestDuration(req.method.toLowerCase(), res.handlerName, 200, duration/1000.0);
 }

-const getParticularSession = function (sessionId, filters) {
-    const sessInfo = GetRoomInfo(sessionId);
+const getParticularSession = async function (roomId, filters) {
+    let connected_sockets = await fetchSockets(roomId);
+    if (connected_sockets.length === 0) {
+        return null;
+    }
+    let sessInfo;
+    for (let item of connected_sockets) {
+        if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
+            sessInfo = item.handshake.query.sessionInfo;
+            break;
+        }
+    }
     if (!sessInfo) {
         return null;
     }
@@ -52,23 +59,41 @@ const getParticularSession = function (sessionId, filters) {
         return null;
     }

-const getAllSessions = function (projectKey, filters, onlineOnly= false) {
+const getAllSessions = async function (projectKey, filters, onlineOnly= false) {
     const sessions = [];
-    const allRooms = onlineOnly ? GetSessions(projectKey) : GetRooms(projectKey);
+    const connected_sockets = await fetchSockets();
+    if (connected_sockets.length === 0) {
+        return sessions;
+    }

-    for (let sessionId of allRooms) {
-        let sessInfo = GetRoomInfo(sessionId);
-        if (!sessInfo) {
+    const rooms = new Map();
+    for (let item of connected_sockets) {
+        // Prefilter checks
+        if (rooms.has(item.handshake.query.roomId)) {
+            continue;
+        }
+        if (item.handshake.query.projectKey !== projectKey || !item.handshake.query.sessionInfo) {
+            continue;
+        }
+        if (onlineOnly && item.handshake.query.identity !== IDENTITIES.session) {
+            continue;
+        }
+
+        // Mark this room as visited
+        rooms.set(item.handshake.query.roomId, true);
+
+        // Add session to the list without filtering
         if (!hasFilters(filters)) {
-            sessions.push(sessInfo);
+            sessions.push(item.handshake.query.sessionInfo);
             continue;
         }
-        if (isValidSession(sessInfo, filters.filter)) {
-            sessions.push(sessInfo);
+
+        // Add session to the list if it passes the filter
+        if (isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
+            sessions.push(item.handshake.query.sessionInfo);
         }
     }

     return sessions
 }
@@ -83,11 +108,12 @@ const socketsListByProject = async function (req, res) {

     // find a particular session
     if (_sessionId) {
-        return respond(req, res, getParticularSession(_sessionId, filters));
+        const sessInfo = await getParticularSession(`${_projectKey}-${_sessionId}`, filters);
+        return respond(req, res, sessInfo);
     }

     // find all sessions for a project
-    const sessions = getAllSessions(_projectKey, filters);
+    const sessions = await getAllSessions(_projectKey, filters);

     // send response
     respond(req, res, sortPaginate(sessions, filters));
@@ -104,11 +130,12 @@ const socketsLiveByProject = async function (req, res) {

     // find a particular session
     if (_sessionId) {
-        return respond(req, res, getParticularSession(_sessionId, filters));
+        let sessInfo = await getParticularSession(`${_projectKey}-${_sessionId}`, filters);
+        return respond(req, res, sessInfo);
     }

     // find all sessions for a project
-    const sessions = getAllSessions(_projectKey, filters, true);
+    const sessions = await getAllSessions(_projectKey, filters, true);

     // send response
     respond(req, res, sortPaginate(sessions, filters));
@@ -119,12 +146,14 @@ const socketsLiveBySession = async function (req, res) {
     debug_log && console.log("[WS]looking for LIVE session");
     res.handlerName = 'socketsLiveBySession';

     const _projectKey = extractProjectKeyFromRequest(req);
     const _sessionId = extractSessionIdFromRequest(req);
     const filters = await extractPayloadFromRequest(req, res);

     // find a particular session
     if (_sessionId) {
-        return respond(req, res, getParticularSession(_sessionId, filters));
+        let sessInfo = await getParticularSession(`${_projectKey}-${_sessionId}`, filters);
+        return respond(req, res, sessInfo);
     }
     return respond(req, res, null);
 }
@@ -140,14 +169,27 @@ const autocomplete = async function (req, res) {
     if (!hasQuery(filters)) {
         return respond(req, res, results);
     }
-    let allSessions = GetSessions(_projectKey);
-    for (let sessionId of allSessions) {
-        let sessInfo = GetRoomInfo(sessionId);
-        if (!sessInfo) {
-        results = [...results, ...getValidAttributes(sessInfo, filters.query)];
+
+    let connected_sockets = await fetchSockets();
+    if (connected_sockets.length === 0) {
+        return results;
+    }
+
+    const rooms = new Map();
+    for (let item of connected_sockets) {
+        if (rooms.has(item.handshake.query.roomId)) {
+            continue;
+        }
+        if (item.handshake.query.sessionInfo) {
+            if ((item.handshake.query.projectKey !== _projectKey) || (item.handshake.query.identity !== IDENTITIES.session)) {
+                continue;
+            }
+            // Mark this room as visited
+            rooms.set(item.handshake.query.roomId, true);
+            results.push(...getValidAttributes(item.handshake.query.sessionInfo, filters.query))
+        }
     }

     respond(req, res, uniqueAutocomplete(results));
 }
@@ -1,49 +0,0 @@
-const roomsInfo = new Map(); // sessionID -> sessionInfo
-const projectSessions = new Map(); // projectKey -> Set(sessionIDs) // all rooms (even with agent only)
-const projectRooms = new Map(); // projectKey -> Set(roomIDs) // online rooms
-
-function AddRoom(projKey, sessID, sessInfo) {
-    roomsInfo.set(sessID, sessInfo);
-    if (!projectRooms.has(projKey)) {
-        projectRooms.set(projKey, new Set());
-    }
-    projectRooms.get(projKey).add(sessID);
-    if (!projectSessions.has(projKey)) {
-        projectSessions.set(projKey, new Set());
-    }
-    projectSessions.get(projKey).add(sessID);
-}
-
-function UpdateRoom(sessID, sessInfo) {
-    roomsInfo.set(sessID, sessInfo);
-}
-
-function DeleteSession(projKey, sessID) {
-    projectSessions.get(projKey)?.delete(sessID);
-}
-
-function DeleteRoom(projKey, sessID) {
-    projectRooms.get(projKey)?.delete(sessID);
-}
-
-function GetRoomInfo(sessID) {
-    return roomsInfo.get(sessID);
-}
-
-function GetRooms(projectKey) {
-    return projectRooms.get(projectKey) || new Set();
-}
-
-function GetSessions(projectKey) {
-    return projectSessions.get(projectKey) || new Set();
-}
-
-module.exports = {
-    AddRoom,
-    UpdateRoom,
-    DeleteRoom,
-    DeleteSession,
-    GetRoomInfo,
-    GetRooms,
-    GetSessions,
-}
@@ -23,12 +23,6 @@ const {
     IncreaseOnlineRooms,
     DecreaseOnlineRooms,
 } = require('../utils/metrics');
-const {
-    AddRoom,
-    UpdateRoom,
-    DeleteRoom,
-    DeleteSession,
-} = require('../utils/rooms');

 const debug_log = process.env.debug === "1";
 const error_log = process.env.ERROR === "1";
@@ -36,12 +30,12 @@ const error_log = process.env.ERROR === "1";
 const findSessionSocketId = async (io, roomId, tabId) => {
     let pickFirstSession = tabId === undefined;
     const connected_sockets = await io.in(roomId).fetchSockets();
-    for (let item of connected_sockets) {
-        if (item.handshake.query.identity === IDENTITIES.session) {
+    for (let socket of connected_sockets) {
+        if (socket.handshake.query.identity === IDENTITIES.session) {
             if (pickFirstSession) {
-                return item.id;
-            } else if (item.tabId === tabId) {
-                return item.id;
+                return socket.id;
+            } else if (socket.handshake.query.tabId === tabId) {
+                return socket.id;
             }
         }
     }
@@ -52,13 +46,13 @@ async function getRoomData(io, roomID) {
     let tabsCount = 0, agentsCount = 0, tabIDs = [], agentIDs = [];
     const connected_sockets = await io.in(roomID).fetchSockets();
     if (connected_sockets.length > 0) {
-        for (let sock of connected_sockets) {
-            if (sock.handshake.query.identity === IDENTITIES.session) {
+        for (let socket of connected_sockets) {
+            if (socket.handshake.query.identity === IDENTITIES.session) {
                 tabsCount++;
-                tabIDs.push(sock.tabId);
+                tabIDs.push(socket.handshake.query.tabId);
             } else {
                 agentsCount++;
-                agentIDs.push(sock.id);
+                agentIDs.push(socket.id);
             }
         }
     } else {
@@ -70,32 +64,29 @@ async function getRoomData(io, roomID) {

 function processNewSocket(socket) {
     socket._connectedAt = new Date();
-    socket.identity = socket.handshake.query.identity;
-    socket.peerId = socket.handshake.query.peerId;
-    let {projectKey: connProjectKey, sessionId: connSessionId, tabId: connTabId} = extractPeerId(socket.peerId);
-    socket.roomId = `${connProjectKey}-${connSessionId}`;
-    socket.projectId = socket.handshake.query.projectId;
-    socket.projectKey = connProjectKey;
-    socket.sessId = connSessionId;
-    socket.tabId = connTabId;
-    debug_log && console.log(`connProjectKey:${connProjectKey}, connSessionId:${connSessionId}, connTabId:${connTabId}, roomId:${socket.roomId}`);
+    let {projectKey: connProjectKey, sessionId: connSessionId, tabId: connTabId} = extractPeerId(socket.handshake.query.peerId);
+    socket.handshake.query.roomId = `${connProjectKey}-${connSessionId}`;
+    socket.handshake.query.projectKey = connProjectKey;
+    socket.handshake.query.sessId = connSessionId;
+    socket.handshake.query.tabId = connTabId;
+    debug_log && console.log(`connProjectKey:${connProjectKey}, connSessionId:${connSessionId}, connTabId:${connTabId}, roomId:${socket.handshake.query.roomId}`);
 }

 async function onConnect(socket) {
     debug_log && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
     processNewSocket(socket);
-    IncreaseTotalWSConnections(socket.identity);
-    IncreaseOnlineConnections(socket.identity);
+    IncreaseTotalWSConnections(socket.handshake.query.identity);
+    IncreaseOnlineConnections(socket.handshake.query.identity);

     const io = getServer();
-    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.roomId);
+    const {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);

-    if (socket.identity === IDENTITIES.session) {
+    if (socket.handshake.query.identity === IDENTITIES.session) {
         // Check if session with the same tabID already connected, if so, refuse new connexion
         if (tabsCount > 0) {
             for (let tab of tabIDs) {
-                if (tab === socket.tabId) {
-                    error_log && console.log(`session already connected, refusing new connexion, peerId: ${socket.peerId}`);
+                if (tab === socket.handshake.query.tabId) {
+                    error_log && console.log(`session already connected, refusing new connexion, peerId: ${socket.handshake.query.peerId}`);
                     io.to(socket.id).emit(EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
                     return socket.disconnect();
                 }
@@ -106,35 +97,34 @@ async function onConnect(socket) {
             // New session creates new room
             IncreaseTotalRooms();
             IncreaseOnlineRooms();
-            AddRoom(socket.projectKey, socket.sessId, socket.handshake.query.sessionInfo);
         }
         // Inform all connected agents about reconnected session
         if (agentsCount > 0) {
             debug_log && console.log(`notifying new session about agent-existence`);
             io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agentIDs);
-            socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
+            socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
         }
     } else if (tabsCount <= 0) {
-        debug_log && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
+        debug_log && console.log(`notifying new agent about no SESSIONS with peerId:${socket.handshake.query.peerId}`);
         io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
     }
-    await socket.join(socket.roomId);
+    await socket.join(socket.handshake.query.roomId);

     if (debug_log) {
-        let connectedSockets = await io.in(socket.roomId).fetchSockets();
+        let connectedSockets = await io.in(socket.handshake.query.roomId).fetchSockets();
         if (connectedSockets.length > 0) {
-            console.log(`${socket.id} joined room:${socket.roomId}, as:${socket.identity}, members:${connectedSockets.length}`);
+            console.log(`${socket.id} joined room:${socket.handshake.query.roomId}, as:${socket.handshake.query.identity}, members:${connectedSockets.length}`);
         }
     }

-    if (socket.identity === IDENTITIES.agent) {
+    if (socket.handshake.query.identity === IDENTITIES.agent) {
         if (socket.handshake.query.agentInfo !== undefined) {
             socket.handshake.query.agentInfo = JSON.parse(socket.handshake.query.agentInfo);
-            socket.agentID = socket.handshake.query.agentInfo.id;
+            socket.handshake.query.agentID = socket.handshake.query.agentInfo.id;
             // Stats
-            startAssist(socket, socket.agentID);
+            startAssist(socket, socket.handshake.query.agentID);
         }
-        socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
+        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NEW_AGENT, socket.id, socket.handshake.query.agentInfo);
     }

     // Set disconnect handler
@@ -153,50 +143,46 @@ async function onConnect(socket) {
 }

 async function onDisconnect(socket) {
-    DecreaseOnlineConnections(socket.identity);
-    debug_log && console.log(`${socket.id} disconnected from ${socket.roomId}`);
+    DecreaseOnlineConnections(socket.handshake.query.identity);
+    debug_log && console.log(`${socket.id} disconnected from ${socket.handshake.query.roomId}`);

-    if (socket.identity === IDENTITIES.agent) {
-        socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
+    if (socket.handshake.query.identity === IDENTITIES.agent) {
+        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
         // Stats
-        endAssist(socket, socket.agentID);
+        endAssist(socket, socket.handshake.query.agentID);
     }
     debug_log && console.log("checking for number of connected agents and sessions");
     const io = getServer();
-    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.roomId);
+    let {tabsCount, agentsCount, tabIDs, agentIDs} = await getRoomData(io, socket.handshake.query.roomId);

     if (tabsCount === -1 && agentsCount === -1) {
         DecreaseOnlineRooms();
-        debug_log && console.log(`room not found: ${socket.roomId}`);
-        DeleteSession(socket.projectKey, socket.sessId);
-        DeleteRoom(socket.projectKey, socket.sessId);
+        debug_log && console.log(`room not found: ${socket.handshake.query.roomId}`);
         return;
     }
     if (tabsCount === 0) {
-        debug_log && console.log(`notifying everyone in ${socket.roomId} about no SESSIONS`);
-        socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
-        DeleteSession(socket.projectKey, socket.sessId);
+        debug_log && console.log(`notifying everyone in ${socket.handshake.query.roomId} about no SESSIONS`);
+        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
     }
     if (agentsCount === 0) {
-        debug_log && console.log(`notifying everyone in ${socket.roomId} about no AGENTS`);
-        socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.NO_AGENTS);
+        debug_log && console.log(`notifying everyone in ${socket.handshake.query.roomId} about no AGENTS`);
+        socket.to(socket.handshake.query.roomId).emit(EVENTS_DEFINITION.emit.NO_AGENTS);
     }
 }

 async function onUpdateEvent(socket, ...args) {
     debug_log && console.log(`${socket.id} sent update event.`);
-    if (socket.identity !== IDENTITIES.session) {
+    if (socket.handshake.query.identity !== IDENTITIES.session) {
         debug_log && console.log('Ignoring update event.');
         return
     }

     args[0] = updateSessionData(socket, args[0])
-    Object.assign(socket.handshake.query.sessionInfo, args[0].data, {tabId: args[0]?.meta?.tabId});
-    UpdateRoom(socket.sessId, socket.handshake.query.sessionInfo);

     // Update sessionInfo for all agents in the room
     const io = getServer();
-    const connected_sockets = await io.in(socket.roomId).fetchSockets();
+    const connected_sockets = await io.in(socket.handshake.query.roomId).fetchSockets();
     for (let item of connected_sockets) {
         if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
             Object.assign(item.handshake.query.sessionInfo, args[0]?.data, {tabId: args[0]?.meta?.tabId});
@@ -212,17 +198,17 @@ async function onAny(socket, eventName, ...args) {
         return
     }
     args[0] = updateSessionData(socket, args[0])
-    if (socket.identity === IDENTITIES.session) {
-        debug_log && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.roomId}`);
-        socket.to(socket.roomId).emit(eventName, args[0]);
+    if (socket.handshake.query.identity === IDENTITIES.session) {
+        debug_log && console.log(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to room:${socket.handshake.query.roomId}`);
+        socket.to(socket.handshake.query.roomId).emit(eventName, args[0]);
     } else {
         // Stats
         handleEvent(eventName, socket, args[0]);
-        debug_log && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.roomId}`);
+        debug_log && console.log(`received event:${eventName}, from:${socket.handshake.query.identity}, sending message to session of room:${socket.handshake.query.roomId}`);
         const io = getServer();
-        let socketId = await findSessionSocketId(io, socket.roomId, args[0]?.meta?.tabId);
+        let socketId = await findSessionSocketId(io, socket.handshake.query.roomId, args[0]?.meta?.tabId);
         if (socketId === null) {
-            debug_log && console.log(`session not found for:${socket.roomId}`);
+            debug_log && console.log(`session not found for:${socket.handshake.query.roomId}`);
             io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
         } else {
             debug_log && console.log("message sent");
@@ -233,8 +219,8 @@ async function onAny(socket, eventName, ...args) {

 // Back compatibility (add top layer with meta information)
 function updateSessionData(socket, sessionData) {
-    if (sessionData?.meta === undefined && socket.identity === IDENTITIES.session) {
-        sessionData = {meta: {tabId: socket.tabId, version: 1}, data: sessionData};
+    if (sessionData?.meta === undefined && socket.handshake.query.identity === IDENTITIES.session) {
+        sessionData = {meta: {tabId: socket.handshake.query.tabId, version: 1}, data: sessionData};
     }
     return sessionData
 }
@@ -7,6 +7,16 @@ const getServer = function () {
     return io;
 }

+const fetchSockets = async function (roomID) {
+    if (!io) {
+        return [];
+    }
+    if (!roomID) {
+        return await io.fetchSockets();
+    }
+    return await io.in(roomID).fetchSockets();
+}
+
 const createSocketIOServer = function (server, prefix) {
     if (io) {
         return io;
@@ -26,4 +36,5 @@ const createSocketIOServer = function (server, prefix) {
 module.exports = {
     createSocketIOServer,
     getServer,
+    fetchSockets,
 }
@@ -1,15 +1,8 @@
-# GSSAPI = true to enable Kerberos auth for Kafka and manually build librdkafka with GSSAPI support
-ARG GSSAPI=false
-
 #ARCH can be amd64 or arm64
 ARG ARCH=amd64

 FROM --platform=linux/$ARCH golang:1.21-alpine3.18 AS build
-RUN if [ "$GSSAPI" = "true" ]; then \
-        apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5; \
-    else \
-        apk add --no-cache gcc g++ make libc-dev; \
-    fi
+RUN apk add --no-cache --repository=https://dl-cdn.alpinelinux.org/alpine/edge/community git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5;
 WORKDIR /root

 # Load code dependencies
@@ -24,11 +17,7 @@ COPY internal internal

 # Build service
 ARG SERVICE_NAME
-RUN if [ "$GSSAPI" = "true" ]; then \
-        CGO_ENABLED=1 GOOS=linux GOARCH=$ARCH go build -o service -tags dynamic openreplay/backend/cmd/$SERVICE_NAME; \
-    else \
-        CGO_ENABLED=1 GOOS=linux GOARCH=$ARCH go build -o service -tags musl openreplay/backend/cmd/$SERVICE_NAME; \
-    fi
+RUN CGO_ENABLED=1 GOOS=linux GOARCH=$ARCH go build -o service -tags dynamic openreplay/backend/cmd/$SERVICE_NAME

 FROM --platform=linux/$ARCH alpine AS entrypoint
 ARG GIT_SHA
@@ -36,11 +25,7 @@ ARG GSSAPI=false
 LABEL GIT_SHA=$GIT_SHA
 LABEL GSSAPI=$GSSAPI

-RUN if [ "$GSSAPI" = "true" ]; then \
-        apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5; \
-    else \
-        apk add --no-cache ca-certificates cyrus-sasl cyrus-sasl-gssapiv2 krb5; \
-    fi
+RUN apk add --no-cache --repository=https://dl-cdn.alpinelinux.org/alpine/edge/community ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
 RUN adduser -u 1001 openreplay -D

 ARG SERVICE_NAME
126 backend/build.sh

@@ -9,86 +9,88 @@
 # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
 set -e

 GIT_ROOT=$(git rev-parse --show-toplevel)
 source $GIT_ROOT/scripts/lib/_docker.sh

 git_sha=$(git rev-parse --short HEAD)
 image_tag=${IMAGE_TAG:-$git_sha}
 ee="false"
 # Possible values: amd64, arm64
 arch="${ARCH:-"amd64"}"

 check_prereq() {
-  which docker || {
-    echo "Docker not installed, please install docker."
-    exit 1
-  }
-  return
+    which docker || {
+        echo "Docker not installed, please install docker."
+        exit 1
+    }
+    return
 }

 [[ $1 == ee ]] && ee=true
 [[ $PATCH -eq 1 ]] && {
-  chart=$2
-  image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  [[ $ee == "true" ]] && {
-    image_tag="${image_tag}-ee"
-  }
+    chart=$2
+    image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    [[ $ee == "true" ]] && {
+        image_tag="${image_tag}-ee"
+    }
 }
 update_helm_release() {
-  chart=$1
-  HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
-  # Update the chart version
-  sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Update image tags
-  sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  # Commit the changes
-  git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
-  git commit -m "chore(helm): Updating $chart image release"
+    chart=$1
+    HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
+    # Update the chart version
+    sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Update image tags
+    sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    # Commit the changes
+    git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
+    git commit -m "chore(helm): Updating $chart image release"
 }

 function build_service() {
-  image="$1"
-  echo "BUILDING $image"
-  docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/$arch --build-arg ARCH=$arch --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha --build-arg GSSAPI=${GSSAPI:-'false'} .
-  [[ $PUSH_IMAGE -eq 1 ]] && {
-    docker push ${DOCKER_REPO:-'local'}/$image:${image_tag}
-  }
-  [[ $SIGN_IMAGE -eq 1 ]] && {
-    cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image:${image_tag}
-  }
-  echo "Build completed for $image"
-  return
+    image="$1"
+    echo "BUILDING $image"
+    docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/$arch --build-arg ARCH=$arch --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha .
+    [[ $PUSH_IMAGE -eq 1 ]] && {
+        docker push ${DOCKER_REPO:-'local'}/$image:${image_tag}
+    }
+    [[ $SIGN_IMAGE -eq 1 ]] && {
+        cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image:${image_tag}
+    }
+    echo "Build completed for $image"
+    return
 }

 function build_api() {
-  destination="_backend"
-  [[ $1 == "ee" ]] && {
-    destination="_backend_ee"
-  }
-  [[ -d ../${destination} ]] && {
-    echo "Removing previous build cache"
-    rm -rf ../${destination}
-  }
-  cp -R ../backend ../${destination}
-  cd ../${destination}
-  # Copy enterprise code
-  [[ $1 == "ee" ]] && {
-    cp -r ../ee/backend/* ./
-    ee="true"
-  }
-  [[ $2 != "" ]] && {
-    build_service $2
-    [[ $PATCH -eq 1 ]] && update_helm_release $2
-    cd ../backend
-    rm -rf ../${destination}
-    return
-  }
-  for image in $(ls cmd); do
-    build_service $image
-    echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}"
-    [[ $PATCH -eq 1 ]] && update_helm_release $image
-  done
-  cd ../backend
-  rm -rf ../${destination}
-  echo "backend build completed"
+    destination="_backend"
+    [[ $1 == "ee" ]] && {
+        destination="_backend_ee"
+    }
+    [[ -d ../${destination} ]] && {
+        echo "Removing previous build cache"
+        rm -rf ../${destination}
+    }
+    cp -R ../backend ../${destination}
+    cd ../${destination}
+    # Copy enterprise code
+    [[ $1 == "ee" ]] && {
+        cp -r ../ee/backend/* ./
+        ee="true"
+    }
+    [[ $2 != "" ]] && {
+        build_service $2
+        [[ $PATCH -eq 1 ]] && update_helm_release $2
+        cd ../backend
+        rm -rf ../${destination}
+        return
+    }
+    for image in $(ls cmd); do
+        build_service $image
+        echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}"
+        [[ $PATCH -eq 1 ]] && update_helm_release $image
+    done
+    cd ../backend
+    rm -rf ../${destination}
+    echo "backend build completed"
 }

 check_prereq
-build_api $1 $2
+build_api "$1" "$2"
@@ -104,7 +104,7 @@ func (c *cacher) cacheURL(t *Task) {
 	start := time.Now()
 	req, _ := http.NewRequest("GET", t.requestURL, nil)
 	if t.retries%2 == 0 {
-		req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0")
+		req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:98.0) Gecko/20100101 Firefox/98.0")
 	}
 	for k, v := range c.requestHeaders {
 		req.Header.Set(k, v)

@@ -13,6 +13,7 @@ type ObjectsConfig struct {
 	AWSSkipSSLValidation bool   `env:"AWS_SKIP_SSL_VALIDATION"`
 	AzureAccountName     string `env:"AZURE_ACCOUNT_NAME"`
 	AzureAccountKey      string `env:"AZURE_ACCOUNT_KEY"`
+	UseS3Tags            bool   `env:"USE_S3_TAGS,default=true"`
 }

 func (c *ObjectsConfig) UseFileTags() bool {

@@ -119,7 +119,11 @@ func (conn *Conn) InsertWebPageEvent(sess *sessions.Session, e *messages.PageEve
 		log.Printf("insert web page event in bulk err: %s", err)
 	}
 	// Add new value set to autocomplete bulk
-	conn.InsertAutocompleteValue(sess.SessionID, sess.ProjectID, "LOCATION", url.DiscardURLQuery(path))
+	location := path
+	if query != "" {
+		location += "?" + query
+	}
+	conn.InsertAutocompleteValue(sess.SessionID, sess.ProjectID, "LOCATION", location)
 	conn.InsertAutocompleteValue(sess.SessionID, sess.ProjectID, "REFERRER", url.DiscardURLQuery(e.Referrer))
 	return nil
 }
@@ -6,7 +6,6 @@ import (
 	"io"
 	"log"
 	"net/http"
-	"net/url"
 	"os"
 	"sort"
 	"strconv"
@@ -28,7 +27,7 @@ type storageImpl struct {
 	uploader *s3manager.Uploader
 	svc      *s3.S3
 	bucket   *string
-	fileTag  string
+	fileTag  *string
 }

 func NewS3(cfg *objConfig.ObjectsConfig) (objectstorage.ObjectStorage, error) {
@@ -60,14 +59,10 @@ func NewS3(cfg *objConfig.ObjectsConfig) (objectstorage.ObjectStorage, error) {
 		uploader: s3manager.NewUploader(sess),
 		svc:      s3.New(sess), // AWS Docs: "These clients are safe to use concurrently."
 		bucket:   &cfg.BucketName,
-		fileTag:  loadFileTag(),
+		fileTag:  tagging(cfg.UseS3Tags),
 	}, nil
 }

-func (s *storageImpl) tagging() *string {
-	return &s.fileTag
-}
-
 func (s *storageImpl) Upload(reader io.Reader, key string, contentType string, compression objectstorage.CompressionType) error {
 	cacheControl := "max-age=2628000, immutable, private"
 	var contentEncoding *string
@@ -89,7 +84,7 @@ func (s *storageImpl) Upload(reader io.Reader, key string, contentType string, c
 		ContentType:     &contentType,
 		CacheControl:    &cacheControl,
 		ContentEncoding: contentEncoding,
-		Tagging:         s.tagging(),
+		Tagging:         s.fileTag,
 	})
 	return err
 }
@@ -207,16 +202,3 @@ func (s *storageImpl) GetPreSignedUploadUrl(key string) (string, error) {
 	}
 	return urlStr, nil
 }
-
-func loadFileTag() string {
-	// Load file tag from env
-	key := "retention"
-	value := os.Getenv("RETENTION")
-	if value == "" {
-		value = "default"
-	}
-	// Create URL encoded tag set for file
-	params := url.Values{}
-	params.Add(key, value)
-	return params.Encode()
-}
5 backend/pkg/objectstorage/s3/tags.go Normal file

@@ -0,0 +1,5 @@
+package s3
+
+func tagging(useTags bool) *string {
+	return nil
+}
1 ee/api/.gitignore vendored

@@ -227,6 +227,7 @@ Pipfile.lock
 /chalicelib/core/sessions.py
 /chalicelib/core/sessions_assignments.py
 /chalicelib/core/sessions_mobs.py
+/chalicelib/core/significance.py
 /chalicelib/core/socket_ios.py
 /chalicelib/core/sourcemaps.py
 /chalicelib/core/sourcemaps_parser.py
@@ -11,6 +11,8 @@ ENV SOURCE_MAP_VERSION=0.7.4 \

 WORKDIR /work
 COPY requirements.txt ./requirements.txt
+# Caching the source build
+RUN pip install --no-cache-dir --upgrade python3-saml==1.16.0 --no-binary=lxml
 RUN pip install --no-cache-dir --upgrade -r requirements.txt

 COPY . .
@@ -15,16 +15,16 @@ fastapi = "==0.104.1"
 gunicorn = "==21.2.0"
 python-decouple = "==3.8"
 apscheduler = "==3.10.4"
-python3-saml = "==1.16.0"
 python-multipart = "==0.0.6"
 redis = "==5.0.1"
+python3-saml = "==1.16.0"
+azure-storage-blob = "==12.19.0"
-psycopg = {extras = ["binary", "pool"], version = "==3.1.14"}
 uvicorn = {extras = ["standard"], version = "==0.23.2"}
 pydantic = {extras = ["email"], version = "==2.3.0"}
 clickhouse-driver = {extras = ["lz4"], version = "==0.2.6"}
+psycopg = {extras = ["binary", "pool"], version = "==3.1.12"}

 [dev-packages]

 [requires]
-python_version = "3.11"
+python_version = "3.12"
@@ -68,7 +68,9 @@ async def lifespan(app: FastAPI):
         "application_name": "AIO" + config("APP_NAME", default="PY"),
     }

-    database = psycopg_pool.AsyncConnectionPool(kwargs=database, connection_class=ORPYAsyncConnection)
+    database = psycopg_pool.AsyncConnectionPool(kwargs=database, connection_class=ORPYAsyncConnection,
+                                                min_size=config("PG_AIO_MINCONN", cast=int, default=1),
+                                                max_size=config("PG_AIO_MAXCONN", cast=int, default=5), )
    app.state.postgresql = database

     # App listening
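A minimal standalone sketch of the same pattern: sizing an async psycopg pool from environment variables with python-decouple defaults. The DSN is a placeholder; the real service assembles its connection kwargs from its own settings.

from decouple import config
import psycopg_pool

pool = psycopg_pool.AsyncConnectionPool(
    conninfo="postgresql://localhost/openreplay",                 # placeholder DSN
    min_size=config("PG_AIO_MINCONN", cast=int, default=1),       # smallest number of pooled connections
    max_size=config("PG_AIO_MAXCONN", cast=int, default=5),       # upper bound under load
    open=False,  # open explicitly inside the application's lifespan hook
)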
@@ -213,10 +213,10 @@ def __merge_metric_with_data(metric: schemas.CardSchema,
     if data.series is not None and len(data.series) > 0:
         metric.series = data.series

-    if len(data.filters) > 0:
-        for s in metric.series:
-            s.filter.filters += data.filters
-        metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
+    # if len(data.filters) > 0:
+    #     for s in metric.series:
+    #         s.filter.filters += data.filters
+    #     metric = schemas.CardSchema(**metric.model_dump(by_alias=True))
     return metric


@@ -277,11 +277,11 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):

 def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
     if len(data.series) == 0:
-        return {"data": []}
+        return []
     data.series[0].filter.startTimestamp = data.startTimestamp
     data.series[0].filter.endTimestamp = data.endTimestamp
     data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
-    return {"data": data}
+    return data


 def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
@@ -251,7 +251,7 @@ def get_by_project_key(project_key):
                             {"project_key": project_key})
         cur.execute(query=query)
         row = cur.fetchone()
-        return row["project_id"] if row else None
+        return helper.dict_to_camel_case(row)


 def get_project_key(project_id):
@@ -1,10 +1,10 @@
 import ast
+import logging
 from typing import List, Union

 import schemas
-from chalicelib.core import events, metadata, projects, performance_event, metrics
+from chalicelib.core import events, metadata, projects, performance_event, metrics, sessions_favorite, sessions_legacy
 from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
-import logging

 logger = logging.getLogger(__name__)
 SESSION_PROJECTION_COLS_CH = """\
@@ -110,6 +110,8 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
 def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
                     error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
                     platform="web"):
+    if data.bookmarked:
+        data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id)
     full_args, query_part = search_query_parts_ch(data=data, error_status=error_status, errors_only=errors_only,
                                                   favorite_only=data.bookmarked, issue=issue, project_id=project_id,
                                                   user_id=user_id, platform=platform)
@@ -354,6 +356,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
                                                  density=density))
     extra_event = None
     extra_deduplication = []
+    extra_conditions = None
     if metric_of == schemas.MetricOfTable.visited_url:
         extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
                           FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
@@ -362,13 +365,30 @@
                           AND ev.project_id = %(project_id)s
                           AND ev.event_type = 'LOCATION'"""
         extra_deduplication.append("url_path")
+        extra_conditions = {}
+        for e in data.events:
+            if e.type == schemas.EventType.location:
+                if e.operator not in extra_conditions:
+                    extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
+                        "type": e.type,
+                        "isEvent": True,
+                        "value": [],
+                        "operator": e.operator,
+                        "filters": []
+                    })
+                for v in e.value:
+                    if v not in extra_conditions[e.operator].value:
+                        extra_conditions[e.operator].value.append(v)
+        extra_conditions = list(extra_conditions.values())
+
     elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
         data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
                                                               operator=schemas.SearchEventOperator._is))
     full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False,
                                                   favorite_only=False, issue=None, project_id=project_id,
                                                   user_id=None, extra_event=extra_event,
-                                                  extra_deduplication=extra_deduplication)
+                                                  extra_deduplication=extra_deduplication,
+                                                  extra_conditions=extra_conditions)
     full_args["step_size"] = step_size
     sessions = []
     with ch_client.ClickHouseClient() as cur:
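An isolated sketch of the grouping logic above, with plain dataclasses standing in for the pydantic event schema: values of location events are bucketed per operator and de-duplicated before being turned into extra SQL conditions.

from dataclasses import dataclass, field

@dataclass
class LocationEvent:  # hypothetical stand-in for the session-search event schema
    operator: str
    value: list[str] = field(default_factory=list)

def group_location_values(events: list[LocationEvent]) -> list[LocationEvent]:
    grouped: dict[str, LocationEvent] = {}
    for e in events:
        bucket = grouped.setdefault(e.operator, LocationEvent(operator=e.operator))
        for v in e.value:
            if v not in bucket.value:  # keep each URL path once per operator
                bucket.value.append(v)
    return list(grouped.values())

print(group_location_values([LocationEvent("contains", ["/cart"]),
                             LocationEvent("contains", ["/cart", "/checkout"])]))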
@@ -521,7 +541,14 @@ def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEve

 # this function generates the query and return the generated-query with the dict of query arguments
 def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
-                          project_id, user_id, platform="web", extra_event=None, extra_deduplication=[]):
+                          project_id, user_id, platform="web", extra_event=None, extra_deduplication=[],
+                          extra_conditions=None):
+    if issue:
+        data.filters.append(
+            schemas.SessionSearchFilterSchema(value=[issue['type']],
+                                              type=schemas.FilterType.issue.value,
+                                              operator='is')
+        )
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                  "projectId": project_id, "userId": user_id}
@@ -1446,12 +1473,17 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     extra_join = ""
     if issue is not None:
         extra_join = """
-                INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id)
-                WHERE issues.session_id=f.session_id
-                    AND p_issues.type=%(issue_type)s
-                    AND p_issues.context_string=%(issue_contextString)s
-                    AND timestamp >= f.first_event_ts
-                    AND timestamp <= f.last_event_ts) AS issues ON(TRUE)
+                INNER JOIN (SELECT session_id
+                            FROM experimental.issues
+                                INNER JOIN experimental.events USING (issue_id)
+                            WHERE issues.type = %(issue_type)s
+                              AND issues.context_string = %(issue_contextString)s
+                              AND issues.project_id = %(projectId)s
+                              AND events.project_id = %(projectId)s
+                              AND events.issue_type = %(issue_type)s
+                              AND events.datetime >= toDateTime(%(startDate)s/1000)
+                              AND events.datetime <= toDateTime(%(endDate)s/1000)
+                            ) AS issues ON (f.session_id = issues.session_id)
                 """
         full_args["issue_contextString"] = issue["contextString"]
         full_args["issue_type"] = issue["type"]
@@ -1476,9 +1508,24 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu

     if extra_event:
         extra_event = f"INNER JOIN ({extra_event}) AS extra_event USING(session_id)"
-        # extra_join = f"""INNER JOIN {extra_event} AS ev USING(session_id)"""
-        # extra_constraints.append("ev.timestamp>=%(startDate)s")
-        # extra_constraints.append("ev.timestamp<=%(endDate)s")
+        if extra_conditions and len(extra_conditions) > 0:
+            _extra_or_condition = []
+            for i, c in enumerate(extra_conditions):
+                if _isAny_opreator(c.operator):
+                    continue
+                e_k = f"ec_value{i}"
+                op = __get_sql_operator(c.operator)
+                c.value = helper.values_for_operator(value=c.value, op=c.operator)
+                full_args = {**full_args,
+                             **_multiple_values(c.value, value_key=e_k)}
+                if c.type == events.EventType.LOCATION.ui_type:
+                    _extra_or_condition.append(
+                        _multiple_conditions(f"extra_event.url_path {op} %({e_k})s",
+                                             c.value, value_key=e_k))
+                else:
+                    logging.warning(f"unsupported extra_event type:${c.type}")
+            if len(_extra_or_condition) > 0:
+                extra_constraints.append("(" + " OR ".join(_extra_or_condition) + ")")
     else:
         extra_event = ""
     if errors_only:
@@ -1668,3 +1715,29 @@ def check_recording_status(project_id: int) -> dict:
         "recordingStatus": row["recording_status"],
         "sessionsCount": row["sessions_count"]
     }
+
+
+# TODO: rewrite this function to use ClickHouse
+def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
+                           ascending: bool = False) -> dict:
+    if session_ids is None or len(session_ids) == 0:
+        return {"total": 0, "sessions": []}
+    with pg_client.PostgresClient() as cur:
+        meta_keys = metadata.get(project_id=project_id)
+        params = {"project_id": project_id, "session_ids": tuple(session_ids)}
+        order_direction = 'ASC' if ascending else 'DESC'
+        main_query = cur.mogrify(f"""SELECT {sessions_legacy.SESSION_PROJECTION_BASE_COLS}
+                                        {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
+                                     FROM public.sessions AS s
+                                     WHERE project_id=%(project_id)s
+                                         AND session_id IN %(session_ids)s
+                                     ORDER BY {sort_by} {order_direction};""", params)
+
+        cur.execute(main_query)
+        rows = cur.fetchall()
+        if len(meta_keys) > 0:
+            for s in rows:
+                s["metadata"] = {}
+                for m in meta_keys:
+                    s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
+        return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
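An illustrative call of the new helper (IDs are placeholders and a live PostgreSQL connection is assumed); the returned dict carries a total plus camelCase session records with metadata folded in:

# Hypothetical invocation; project and session IDs are placeholders.
result = search_sessions_by_ids(project_id=1, session_ids=[101, 102])
print(result["total"])               # number of matched sessions
for session in result["sessions"]:   # camelCase dicts; metadata grouped under "metadata"
    print(session.get("metadata"))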
@@ -128,16 +128,15 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
     return row


-def delete(tenant_id, user_id, project_id, note_id):
+def delete(project_id, note_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(""" UPDATE public.sessions_notes
                             SET deleted_at = timezone('utc'::text, now())
                             WHERE note_id = %(note_id)s
                                 AND project_id = %(project_id)s
-                                AND user_id = %(user_id)s
                                 AND deleted_at ISNULL;""",
-                        {"project_id": project_id, "user_id": user_id, "note_id": note_id})
+                        {"project_id": project_id, "note_id": note_id})
         )
     return {"data": {"state": "success"}}
@ -1,619 +0,0 @@
|
|||
__author__ = "AZNAUROV David"
|
||||
__maintainer__ = "KRAIEM Taha Yassine"
|
||||
|
||||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
"""
|
||||
todo: remove LIMIT from the query
|
||||
"""
|
||||
|
||||
from typing import List
|
||||
import math
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
|
||||
from psycopg2.extras import RealDictRow
|
||||
from chalicelib.utils import pg_client, helper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
SIGNIFICANCE_THRSH = 0.4
|
||||
# Taha: the value 24 was estimated in v1.15
|
||||
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
|
||||
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
|
||||
21: 2.080, 22: 2.074, 23: 2.069, 24: 2.067, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045,
|
||||
30: 2.042}
|
||||
|
||||
|
||||
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
|
||||
"""
|
||||
Add minimal timestamp
|
||||
:param filter_d: filter schema containing the events, filters, etc.
|
||||
:return:
|
||||
"""
|
||||
stages: [dict] = filter_d.events
|
||||
filters: [dict] = filter_d.filters
|
||||
filter_issues = []
|
||||
# TODO: enable this if needed by an endpoint
|
||||
# filter_issues = filter_d.get("issueTypes")
|
||||
# if filter_issues is None or len(filter_issues) == 0:
|
||||
# filter_issues = []
|
||||
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
|
||||
first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
|
||||
"s.start_ts <= %(endTimestamp)s"]
|
||||
filter_extra_from = []
|
||||
n_stages_query = []
|
||||
values = {}
|
||||
if len(filters) > 0:
|
||||
meta_keys = None
|
||||
for i, f in enumerate(filters):
|
||||
if len(f.value) == 0:
|
||||
continue
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
# filter_args = _multiple_values(f["value"])
|
||||
op = sh.get_sql_operator(f.operator)
|
||||
|
||||
filter_type = f.type
|
||||
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
|
||||
f_k = f"f_value{i}"
|
||||
values = {**values,
|
||||
**sh.multi_values(helper.values_for_operator(value=f.value, op=f.operator),
|
||||
value_key=f_k)}
|
||||
if filter_type == schemas.FilterType.user_browser:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.duration:
|
||||
if len(f.value) > 0 and f.value[0] is not None:
|
||||
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
|
||||
values["minDuration"] = f.value[0]
|
||||
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
|
||||
first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
|
||||
values["maxDuration"] = f.value[1]
|
||||
elif filter_type == schemas.FilterType.referrer:
|
||||
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
|
||||
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
if f.source in meta_keys.keys():
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(
|
||||
f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
|
||||
value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_anonymous_id,
|
||||
schemas.FilterType.user_anonymous_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
i = -1
|
||||
for s in stages:
|
||||
|
||||
if s.operator is None:
|
||||
s.operator = schemas.SearchEventOperator._is
|
||||
|
||||
if not isinstance(s.value, list):
|
||||
s.value = [s.value]
|
||||
is_any = sh.isAny_opreator(s.operator)
|
||||
if not is_any and isinstance(s.value, list) and len(s.value) == 0:
|
||||
continue
|
||||
i += 1
|
||||
if i == 0:
|
||||
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
|
||||
else:
|
||||
extra_from = []
|
||||
op = sh.get_sql_operator(s.operator)
|
||||
# event_type = s["type"].upper()
|
||||
event_type = s.type
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
next_table = events.EventType.CLICK.table
|
||||
next_col_name = events.EventType.CLICK.column
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
next_table = events.EventType.INPUT.table
|
||||
next_col_name = events.EventType.INPUT.column
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
next_table = events.EventType.LOCATION.table
|
||||
next_col_name = events.EventType.LOCATION.column
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
next_table = events.EventType.CUSTOM.table
|
||||
next_col_name = events.EventType.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == events.EventType.CLICK_IOS.ui_type:
|
||||
next_table = events.EventType.CLICK_IOS.table
|
||||
next_col_name = events.EventType.CLICK_IOS.column
|
||||
elif event_type == events.EventType.INPUT_IOS.ui_type:
|
||||
next_table = events.EventType.INPUT_IOS.table
|
||||
next_col_name = events.EventType.INPUT_IOS.column
|
||||
elif event_type == events.EventType.VIEW_IOS.ui_type:
|
||||
next_table = events.EventType.VIEW_IOS.table
|
||||
next_col_name = events.EventType.VIEW_IOS.column
|
||||
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
|
||||
next_table = events.EventType.CUSTOM_IOS.table
|
||||
next_col_name = events.EventType.CUSTOM_IOS.column
|
||||
else:
|
||||
logging.warning(f"=================UNDEFINED:{event_type}")
|
||||
continue
|
||||
|
||||
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
|
||||
value_key=f"value{i + 1}")}
|
||||
if sh.is_negation_operator(s.operator) and i > 0:
|
||||
op = sh.reverse_sql_operator(op)
|
||||
main_condition = "left_not.session_id ISNULL"
|
||||
extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
|
||||
FROM {next_table} AS s_main
|
||||
WHERE
|
||||
{sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
|
||||
values=s.value, value_key=f"value{i + 1}")}
|
||||
AND s_main.timestamp >= T{i}.stage{i}_timestamp
|
||||
AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
|
||||
else:
|
||||
if is_any:
|
||||
main_condition = "TRUE"
|
||||
else:
|
||||
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
|
||||
values=s.value, value_key=f"value{i + 1}")
|
||||
n_stages_query.append(f"""
|
||||
(SELECT main.session_id,
|
||||
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
|
||||
FROM {next_table} AS main {" ".join(extra_from)}
|
||||
WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
|
||||
{f"AND main.session_id=T1.session_id" if i > 0 else ""}
|
||||
AND {main_condition}
|
||||
{(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
|
||||
{(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
|
||||
GROUP BY main.session_id)
|
||||
AS T{i + 1} {"ON (TRUE)" if i > 0 else ""}
|
||||
""")
|
||||
n_stages = len(n_stages_query)
|
||||
if n_stages == 0:
|
||||
return []
|
||||
n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
|
||||
n_stages_query += ") AS stages_t"
|
||||
|
||||
n_stages_query = f"""
|
||||
SELECT stages_and_issues_t.*, sessions.user_uuid
|
||||
FROM (
|
||||
SELECT * FROM (
|
||||
SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])}
|
||||
FROM {n_stages_query}
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT ISS.type as issue_type,
|
||||
ISE.timestamp AS issue_timestamp,
|
||||
COALESCE(ISS.context_string,'') as issue_context,
|
||||
ISS.issue_id as issue_id
|
||||
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
|
||||
WHERE ISE.timestamp >= stages_t.stage1_timestamp
|
||||
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
|
||||
AND ISS.project_id=%(project_id)s
|
||||
AND ISE.session_id = stages_t.session_id
|
||||
AND ISS.type!='custom' -- ignore custom issues because they are massive
|
||||
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}
|
||||
LIMIT 10 -- remove the limit to get exact stats
|
||||
) AS issues_t ON (TRUE)
|
||||
) AS stages_and_issues_t INNER JOIN sessions USING(session_id);
|
||||
"""
|
||||
|
||||
# LIMIT 10000
|
||||
params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
|
||||
"endTimestamp": filter_d.endTimestamp,
|
||||
"issueTypes": tuple(filter_issues), **values}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(n_stages_query, params)
|
||||
logging.debug("---------------------------------------------------")
|
||||
logging.debug(query)
|
||||
logging.debug("---------------------------------------------------")
|
||||
try:
|
||||
cur.execute(query)
|
||||
rows = cur.fetchall()
|
||||
except Exception as err:
|
||||
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(query.decode('UTF-8'))
|
||||
logging.warning("--------- PAYLOAD -----------")
|
||||
logging.warning(filter_d.model_dump_json())
|
||||
logging.warning("--------------------")
|
||||
raise err
|
||||
return rows
|
||||
|
||||
|
||||
def pearson_corr(x: list, y: list):
|
||||
n = len(x)
|
||||
if n != len(y):
|
||||
raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')
|
||||
|
||||
if n < 2:
|
||||
warnings.warn(f'x and y must have length at least 2. Got {n} instead')
|
||||
return None, None, False
|
||||
|
||||
# If an input is constant, the correlation coefficient is not defined.
|
||||
if all(t == x[0] for t in x) or all(t == y[0] for t in y):
|
||||
warnings.warn("An input array is constant; the correlation coefficient is not defined.")
|
||||
return None, None, False
|
||||
|
||||
if n == 2:
|
||||
return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True
|
||||
|
||||
xmean = sum(x) / len(x)
|
||||
ymean = sum(y) / len(y)
|
||||
|
||||
xm = [el - xmean for el in x]
|
||||
ym = [el - ymean for el in y]
|
||||
|
||||
normxm = math.sqrt((sum([xm[i] * xm[i] for i in range(len(xm))])))
|
||||
normym = math.sqrt((sum([ym[i] * ym[i] for i in range(len(ym))])))
|
||||
|
||||
threshold = 1e-8
|
||||
if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
|
||||
# If all the values in x (likewise y) are very close to the mean,
|
||||
# the loss of precision that occurs in the subtraction xm = x - xmean
|
||||
# might result in large errors in r.
|
||||
warnings.warn("An input array is constant; the correlation coefficient is not defined.")
|
||||
|
||||
r = sum(
|
||||
i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))
|
||||
|
||||
# Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
|
||||
# However, if r < 0, we don't care, as our problem is to find only positive correlations
|
||||
r = max(min(r, 1.0), 0.0)
|
||||
|
||||
# approximated confidence
|
||||
if n < 31:
|
||||
t_c = T_VALUES[n]
|
||||
elif n < 50:
|
||||
t_c = 2.02
|
||||
else:
|
||||
t_c = 2
|
||||
if r >= 0.999:
|
||||
confidence = 1
|
||||
else:
|
||||
confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)
|
||||
|
||||
if confidence > SIGNIFICANCE_THRSH:
|
||||
return r, confidence, True
|
||||
else:
|
||||
return r, confidence, False
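To make the return contract of `pearson_corr` above concrete, here is a minimal usage sketch; the 0/1 vectors are invented for illustration and are not data from this repository:

```python
# Minimal sketch: correlating funnel completion with issue occurrence per session.
# Assumes pearson_corr from this module is in scope; in the module the vectors
# come from get_transitions_and_issues_of_each_type().
transitions = [1, 1, 0, 0, 1, 0, 0, 0]   # 1 = session reached the last stage
issue_seen  = [1, 1, 0, 0, 0, 0, 0, 1]   # 1 = the issue occurred in that session

r, confidence, significant = pearson_corr(transitions, issue_seen)
# r is clamped to [0, 1]; significant means confidence > SIGNIFICANCE_THRSH
print(r, confidence, significant)
```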
|
||||
|
||||
|
||||
# def tuple_or(t: tuple):
|
||||
# x = 0
|
||||
# for el in t:
|
||||
# x |= el # | is for bitwise OR
|
||||
# return x
|
||||
#
|
||||
# The following function is a correct optimization of the previous one because t is a list of 0/1 values
|
||||
def tuple_or(t: tuple):
|
||||
for el in t:
|
||||
if el > 0:
|
||||
return 1
|
||||
return 0
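A tiny worked example of how `tuple_or` is applied further down in `get_transitions_and_issues_of_each_type` to collapse the per-issue error vectors into `all_errors` (issue ids and values are invented):

```python
# all_errors is the element-wise OR across the per-issue 0/1 lists;
# uses the tuple_or defined above.
errors = {"issue_a": [0, 1, 0, 1], "issue_b": [0, 0, 1, 1]}
all_errors = [tuple_or(t) for t in zip(*errors.values())]
assert all_errors == [0, 1, 1, 1]
```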
|
||||
|
||||
|
||||
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage):
|
||||
"""
|
||||
Returns two lists with binary values 0/1:
|
||||
|
||||
transitions ::: if transited from the first stage to the last - 1
|
||||
else - 0
|
||||
errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
|
||||
the values are lists
|
||||
if an issue happened between the first stage to the last - 1
|
||||
else - 0
|
||||
|
||||
For a small task of calculating a total drop due to issues,
|
||||
we need to disregard the issue type when creating the `errors`-like array.
|
||||
The `all_errors` array can be obtained by logical OR statement applied to all errors by issue
|
||||
The `transitions` array stays the same
|
||||
"""
|
||||
transitions = []
|
||||
n_sess_affected = 0
|
||||
errors = {}
|
||||
|
||||
for row in rows:
|
||||
t = 0
|
||||
first_ts = row[f'stage{first_stage}_timestamp']
|
||||
last_ts = row[f'stage{last_stage}_timestamp']
|
||||
if first_ts is None:
|
||||
continue
|
||||
elif last_ts is not None:
|
||||
t = 1
|
||||
transitions.append(t)
|
||||
|
||||
ic_present = False
|
||||
for error_id in all_issues:
|
||||
if error_id not in errors:
|
||||
errors[error_id] = []
|
||||
ic = 0
|
||||
row_issue_id = row['issue_id']
|
||||
if row_issue_id is not None:
|
||||
if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
|
||||
if error_id == row_issue_id:
|
||||
ic = 1
|
||||
ic_present = True
|
||||
errors[error_id].append(ic)
|
||||
|
||||
if ic_present and t:
|
||||
n_sess_affected += 1
|
||||
|
||||
all_errors = [tuple_or(t) for t in zip(*errors.values())]
|
||||
|
||||
return transitions, errors, all_errors, n_sess_affected
|
||||
|
||||
|
||||
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
|
||||
"""
|
||||
|
||||
:param rows:
|
||||
:param first_stage:
|
||||
:param last_stage:
|
||||
:return:
|
||||
"""
|
||||
affected_users = defaultdict(lambda: set())
|
||||
affected_sessions = defaultdict(lambda: set())
|
||||
all_issues = {}
|
||||
n_affected_users_dict = defaultdict(lambda: None)
|
||||
n_affected_sessions_dict = defaultdict(lambda: None)
|
||||
n_issues_dict = defaultdict(lambda: 0)
|
||||
issues_by_session = defaultdict(lambda: 0)
|
||||
|
||||
for row in rows:
|
||||
|
||||
# check that the session has reached the first stage of subfunnel:
|
||||
if row[f'stage{first_stage}_timestamp'] is None:
|
||||
continue
|
||||
|
||||
iss = row['issue_type']
|
||||
iss_ts = row['issue_timestamp']
|
||||
|
||||
# check that the issue exists and belongs to subfunnel:
|
||||
if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
|
||||
(row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
|
||||
if row["issue_id"] not in all_issues:
|
||||
all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]}
|
||||
n_issues_dict[row["issue_id"]] += 1
|
||||
if row['user_uuid'] is not None:
|
||||
affected_users[row["issue_id"]].add(row['user_uuid'])
|
||||
|
||||
affected_sessions[row["issue_id"]].add(row['session_id'])
|
||||
issues_by_session[row[f'session_id']] += 1
|
||||
|
||||
if len(affected_users) > 0:
|
||||
n_affected_users_dict.update({
|
||||
iss: len(affected_users[iss]) for iss in affected_users
|
||||
})
|
||||
if len(affected_sessions) > 0:
|
||||
n_affected_sessions_dict.update({
|
||||
iss: len(affected_sessions[iss]) for iss in affected_sessions
|
||||
})
|
||||
return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict
|
||||
|
||||
|
||||
def count_sessions(rows, n_stages):
|
||||
session_counts = {i: set() for i in range(1, n_stages + 1)}
|
||||
for row in rows:
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
session_counts[i].add(row[f"session_id"])
|
||||
|
||||
session_counts = {i: len(session_counts[i]) for i in session_counts}
|
||||
return session_counts
|
||||
|
||||
|
||||
def count_users(rows, n_stages):
|
||||
users_in_stages = {i: set() for i in range(1, n_stages + 1)}
|
||||
for row in rows:
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
users_in_stages[i].add(row["user_uuid"])
|
||||
|
||||
users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
|
||||
return users_count
|
||||
|
||||
|
||||
def get_stages(stages, rows):
|
||||
n_stages = len(stages)
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
users_counts = count_users(rows, n_stages)
|
||||
|
||||
stages_list = []
|
||||
for i, stage in enumerate(stages):
|
||||
|
||||
drop = None
|
||||
if i != 0:
|
||||
if session_counts[i] == 0:
|
||||
drop = 0
|
||||
elif session_counts[i] > 0:
|
||||
drop = int(100 * (session_counts[i] - session_counts[i + 1]) / session_counts[i])
|
||||
|
||||
stages_list.append(
|
||||
{"value": stage.value,
|
||||
"type": stage.type,
|
||||
"operator": stage.operator,
|
||||
"sessionsCount": session_counts[i + 1],
|
||||
"drop_pct": drop,
|
||||
"usersCount": users_counts[i + 1],
|
||||
"dropDueToIssues": 0
|
||||
}
|
||||
)
|
||||
return stages_list
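As a quick sanity check of the drop percentage formula used in `get_stages` above (all numbers invented):

```python
# If 200 sessions reached the previous stage and 150 reached the next one:
prev_count, next_count = 200, 150
drop = int(100 * (prev_count - next_count) / prev_count)
assert drop == 25   # reported as drop_pct for the later stage
```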
|
||||
|
||||
|
||||
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
|
||||
"""
|
||||
|
||||
:param stages:
|
||||
:param rows:
|
||||
:param first_stage: If it's a part of the initial funnel, provide a number of the first stage (starting from 1)
|
||||
:param last_stage: If it's a part of the initial funnel, provide a number of the last stage (starting from 1)
|
||||
:return:
|
||||
"""
|
||||
|
||||
n_stages = len(stages)
|
||||
|
||||
if first_stage is None:
|
||||
first_stage = 1
|
||||
if last_stage is None:
|
||||
last_stage = n_stages
|
||||
if last_stage > n_stages:
|
||||
logging.debug(
|
||||
"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
|
||||
last_stage = n_stages
|
||||
|
||||
n_critical_issues = 0
|
||||
issues_dict = {"significant": [],
|
||||
"insignificant": []}
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
drop = session_counts[first_stage] - session_counts[last_stage]
|
||||
|
||||
all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues(
|
||||
rows, first_stage, last_stage)
|
||||
transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
|
||||
all_issues,
|
||||
first_stage, last_stage)
|
||||
|
||||
del rows
|
||||
|
||||
if any(all_errors):
|
||||
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
|
||||
if total_drop_corr is not None and drop is not None:
|
||||
total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
|
||||
if drop_only:
|
||||
return total_drop_due_to_issues
|
||||
for issue_id in all_issues:
|
||||
|
||||
if not any(errors[issue_id]):
|
||||
continue
|
||||
r, confidence, is_sign = pearson_corr(transitions, errors[issue_id])
|
||||
|
||||
if r is not None and drop is not None and is_sign:
|
||||
lost_conversions = int(r * affected_sessions[issue_id])
|
||||
else:
|
||||
lost_conversions = None
|
||||
if r is None:
|
||||
r = 0
|
||||
issues_dict['significant' if is_sign else 'insignificant'].append({
|
||||
"type": all_issues[issue_id]["issue_type"],
|
||||
"title": helper.get_issue_title(all_issues[issue_id]["issue_type"]),
|
||||
"affected_sessions": affected_sessions[issue_id],
|
||||
"unaffected_sessions": session_counts[1] - affected_sessions[issue_id],
|
||||
"lost_conversions": lost_conversions,
|
||||
"affected_users": affected_users_dict[issue_id],
|
||||
"conversion_impact": round(r * 100),
|
||||
"context_string": all_issues[issue_id]["context"],
|
||||
"issue_id": issue_id
|
||||
})
|
||||
|
||||
if is_sign:
|
||||
n_critical_issues += n_issues_dict[issue_id]
|
||||
# To limit the number of returned issues to the frontend
|
||||
issues_dict["significant"] = issues_dict["significant"][:20]
|
||||
issues_dict["insignificant"] = issues_dict["insignificant"][:20]
|
||||
|
||||
return n_critical_issues, issues_dict, total_drop_due_to_issues
|
||||
|
||||
|
||||
def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
|
||||
output = []
|
||||
stages = filter_d.events
|
||||
# TODO: handle 1 stage alone
|
||||
if len(stages) == 0:
|
||||
logging.debug("no stages found")
|
||||
return output, 0
|
||||
elif len(stages) == 1:
|
||||
# TODO: count sessions, and users for single stage
|
||||
output = [{
|
||||
"type": stages[0].type,
|
||||
"value": stages[0].value,
|
||||
"dropPercentage": None,
|
||||
"operator": stages[0].operator,
|
||||
"sessionsCount": 0,
|
||||
"dropPct": 0,
|
||||
"usersCount": 0,
|
||||
"dropDueToIssues": 0
|
||||
|
||||
}]
|
||||
# original
|
||||
# counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
|
||||
# project_id=project_id, user_id=None, count_only=True)
|
||||
# first change
|
||||
# counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
|
||||
# project_id=project_id, user_id=None, count_only=True)
|
||||
# last change
|
||||
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
|
||||
project_id=project_id, user_id=None, count_only=True)
|
||||
output[0]["sessionsCount"] = counts["countSessions"]
|
||||
output[0]["usersCount"] = counts["countUsers"]
|
||||
return output, 0
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
return get_stages(stages, []), 0
|
||||
# Obtain the first part of the output
|
||||
stages_list = get_stages(stages, rows)
|
||||
# Obtain the second part of the output
|
||||
total_drop_due_to_issues = get_issues(stages, rows,
|
||||
first_stage=1,
|
||||
last_stage=len(filter_d.events),
|
||||
drop_only=True)
|
||||
return stages_list, total_drop_due_to_issues
|
||||
|
||||
|
||||
def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
|
||||
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
|
||||
stages = filter_d.events
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
return output
|
||||
# Obtain the second part of the output
|
||||
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
|
||||
last_stage=last_stage)
|
||||
output['total_drop_due_to_issues'] = total_drop_due_to_issues
|
||||
# output['critical_issues_count'] = n_critical_issues
|
||||
output = {**output, **issues_dict}
|
||||
return output
@@ -1,618 +1,2 @@
__maintainer__ = "KRAIEM Taha Yassine"
|
||||
|
||||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata
|
||||
from chalicelib.utils import sql_helper as sh
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
"""
|
||||
todo: remove LIMIT from the query
|
||||
"""
|
||||
|
||||
from typing import List
|
||||
import math
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
|
||||
from psycopg2.extras import RealDictRow
|
||||
from chalicelib.utils import pg_client, helper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
SIGNIFICANCE_THRSH = 0.4
|
||||
# Taha: the value 24 was estimated in v1.15
|
||||
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
|
||||
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
|
||||
21: 2.080, 22: 2.074, 23: 2.069, 24: 2.067, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045,
|
||||
30: 2.042}
|
||||
|
||||
|
||||
def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
|
||||
"""
|
||||
Add minimal timestamp
|
||||
:param filter_d: filter schema containing the events, filters, etc.
|
||||
:return:
|
||||
"""
|
||||
stages: [dict] = filter_d.events
|
||||
filters: [dict] = filter_d.filters
|
||||
filter_issues = []
|
||||
# TODO: enable this if needed by an endpoint
|
||||
# filter_issues = filter_d.get("issueTypes")
|
||||
# if filter_issues is None or len(filter_issues) == 0:
|
||||
# filter_issues = []
|
||||
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
|
||||
first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
|
||||
"s.start_ts <= %(endTimestamp)s"]
|
||||
filter_extra_from = []
|
||||
n_stages_query = []
|
||||
values = {}
|
||||
if len(filters) > 0:
|
||||
meta_keys = None
|
||||
for i, f in enumerate(filters):
|
||||
if len(f.value) == 0:
|
||||
continue
|
||||
f.value = helper.values_for_operator(value=f.value, op=f.operator)
|
||||
# filter_args = _multiple_values(f["value"])
|
||||
op = sh.get_sql_operator(f.operator)
|
||||
|
||||
filter_type = f.type
|
||||
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
|
||||
f_k = f"f_value{i}"
|
||||
values = {**values,
|
||||
**sh.multi_values(helper.values_for_operator(value=f.value, op=f.operator),
|
||||
value_key=f_k)}
|
||||
if filter_type == schemas.FilterType.user_browser:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.duration:
|
||||
if len(f.value) > 0 and f.value[0] is not None:
|
||||
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
|
||||
values["minDuration"] = f.value[0]
|
||||
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
|
||||
first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
|
||||
values["maxDuration"] = f.value[1]
|
||||
elif filter_type == schemas.FilterType.referrer:
|
||||
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
|
||||
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f"p.base_referrer {op} %({f_k})s", f.value, value_key=f_k))
|
||||
elif filter_type == events.EventType.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
if f.source in meta_keys.keys():
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(
|
||||
f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
|
||||
value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_anonymous_id,
|
||||
schemas.FilterType.user_anonymous_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
i = -1
|
||||
for s in stages:
|
||||
|
||||
if s.operator is None:
|
||||
s.operator = schemas.SearchEventOperator._is
|
||||
|
||||
if not isinstance(s.value, list):
|
||||
s.value = [s.value]
|
||||
is_any = sh.isAny_opreator(s.operator)
|
||||
if not is_any and isinstance(s.value, list) and len(s.value) == 0:
|
||||
continue
|
||||
i += 1
|
||||
if i == 0:
|
||||
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
|
||||
else:
|
||||
extra_from = []
|
||||
op = sh.get_sql_operator(s.operator)
|
||||
# event_type = s["type"].upper()
|
||||
event_type = s.type
|
||||
if event_type == events.EventType.CLICK.ui_type:
|
||||
next_table = events.EventType.CLICK.table
|
||||
next_col_name = events.EventType.CLICK.column
|
||||
elif event_type == events.EventType.INPUT.ui_type:
|
||||
next_table = events.EventType.INPUT.table
|
||||
next_col_name = events.EventType.INPUT.column
|
||||
elif event_type == events.EventType.LOCATION.ui_type:
|
||||
next_table = events.EventType.LOCATION.table
|
||||
next_col_name = events.EventType.LOCATION.column
|
||||
elif event_type == events.EventType.CUSTOM.ui_type:
|
||||
next_table = events.EventType.CUSTOM.table
|
||||
next_col_name = events.EventType.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == events.EventType.CLICK_IOS.ui_type:
|
||||
next_table = events.EventType.CLICK_IOS.table
|
||||
next_col_name = events.EventType.CLICK_IOS.column
|
||||
elif event_type == events.EventType.INPUT_IOS.ui_type:
|
||||
next_table = events.EventType.INPUT_IOS.table
|
||||
next_col_name = events.EventType.INPUT_IOS.column
|
||||
elif event_type == events.EventType.VIEW_IOS.ui_type:
|
||||
next_table = events.EventType.VIEW_IOS.table
|
||||
next_col_name = events.EventType.VIEW_IOS.column
|
||||
elif event_type == events.EventType.CUSTOM_IOS.ui_type:
|
||||
next_table = events.EventType.CUSTOM_IOS.table
|
||||
next_col_name = events.EventType.CUSTOM_IOS.column
|
||||
else:
|
||||
logging.warning(f"=================UNDEFINED:{event_type}")
|
||||
continue
|
||||
|
||||
values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
|
||||
value_key=f"value{i + 1}")}
|
||||
if sh.is_negation_operator(s.operator) and i > 0:
|
||||
op = sh.reverse_sql_operator(op)
|
||||
main_condition = "left_not.session_id ISNULL"
|
||||
extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
|
||||
FROM {next_table} AS s_main
|
||||
WHERE
|
||||
{sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
|
||||
values=s.value, value_key=f"value{i + 1}")}
|
||||
AND s_main.timestamp >= T{i}.stage{i}_timestamp
|
||||
AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
|
||||
else:
|
||||
if is_any:
|
||||
main_condition = "TRUE"
|
||||
else:
|
||||
main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
|
||||
values=s.value, value_key=f"value{i + 1}")
|
||||
n_stages_query.append(f"""
|
||||
(SELECT main.session_id,
|
||||
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
|
||||
FROM {next_table} AS main {" ".join(extra_from)}
|
||||
WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
|
||||
{f"AND main.session_id=T1.session_id" if i > 0 else ""}
|
||||
AND {main_condition}
|
||||
{(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
|
||||
{(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
|
||||
GROUP BY main.session_id)
|
||||
AS T{i + 1} {"ON (TRUE)" if i > 0 else ""}
|
||||
""")
|
||||
n_stages = len(n_stages_query)
|
||||
if n_stages == 0:
|
||||
return []
|
||||
n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
|
||||
n_stages_query += ") AS stages_t"
|
||||
|
||||
n_stages_query = f"""
|
||||
SELECT stages_and_issues_t.*, sessions.user_uuid
|
||||
FROM (
|
||||
SELECT * FROM (
|
||||
SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])}
|
||||
FROM {n_stages_query}
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT ISS.type as issue_type,
|
||||
ISE.timestamp AS issue_timestamp,
|
||||
COALESCE(ISS.context_string,'') as issue_context,
|
||||
ISS.issue_id as issue_id
|
||||
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
|
||||
WHERE ISE.timestamp >= stages_t.stage1_timestamp
|
||||
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
|
||||
AND ISS.project_id=%(project_id)s
|
||||
AND ISE.session_id = stages_t.session_id
|
||||
AND ISS.type!='custom' -- ignore custom issues because they are massive
|
||||
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}
|
||||
LIMIT 10 -- remove the limit to get exact stats
|
||||
) AS issues_t ON (TRUE)
|
||||
) AS stages_and_issues_t INNER JOIN sessions USING(session_id);
|
||||
"""
|
||||
|
||||
# LIMIT 10000
|
||||
params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
|
||||
"endTimestamp": filter_d.endTimestamp,
|
||||
"issueTypes": tuple(filter_issues), **values}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(n_stages_query, params)
|
||||
logging.debug("---------------------------------------------------")
|
||||
logging.debug(query)
|
||||
logging.debug("---------------------------------------------------")
|
||||
try:
|
||||
cur.execute(query)
|
||||
rows = cur.fetchall()
|
||||
except Exception as err:
|
||||
logging.warning("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
|
||||
logging.warning(query.decode('UTF-8'))
|
||||
logging.warning("--------- PAYLOAD -----------")
|
||||
logging.warning(filter_d.model_dump_json())
|
||||
logging.warning("--------------------")
|
||||
raise err
|
||||
return rows
|
||||
|
||||
|
||||
def pearson_corr(x: list, y: list):
|
||||
n = len(x)
|
||||
if n != len(y):
|
||||
raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')
|
||||
|
||||
if n < 2:
|
||||
warnings.warn(f'x and y must have length at least 2. Got {n} instead')
|
||||
return None, None, False
|
||||
|
||||
# If an input is constant, the correlation coefficient is not defined.
|
||||
if all(t == x[0] for t in x) or all(t == y[0] for t in y):
|
||||
warnings.warn("An input array is constant; the correlation coefficient is not defined.")
|
||||
return None, None, False
|
||||
|
||||
if n == 2:
|
||||
return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True
|
||||
|
||||
xmean = sum(x) / len(x)
|
||||
ymean = sum(y) / len(y)
|
||||
|
||||
xm = [el - xmean for el in x]
|
||||
ym = [el - ymean for el in y]
|
||||
|
||||
normxm = math.sqrt((sum([xm[i] * xm[i] for i in range(len(xm))])))
|
||||
normym = math.sqrt((sum([ym[i] * ym[i] for i in range(len(ym))])))
|
||||
|
||||
threshold = 1e-8
|
||||
if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
|
||||
# If all the values in x (likewise y) are very close to the mean,
|
||||
# the loss of precision that occurs in the subtraction xm = x - xmean
|
||||
# might result in large errors in r.
|
||||
warnings.warn("An input array is constant; the correlation coefficient is not defined.")
|
||||
|
||||
r = sum(
|
||||
i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))
|
||||
|
||||
# Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
|
||||
# However, if r < 0, we don't care, as our problem is to find only positive correlations
|
||||
r = max(min(r, 1.0), 0.0)
|
||||
|
||||
# approximated confidence
|
||||
if n < 31:
|
||||
t_c = T_VALUES[n]
|
||||
elif n < 50:
|
||||
t_c = 2.02
|
||||
else:
|
||||
t_c = 2
|
||||
if r >= 0.999:
|
||||
confidence = 1
|
||||
else:
|
||||
confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)
|
||||
|
||||
if confidence > SIGNIFICANCE_THRSH:
|
||||
return r, confidence, True
|
||||
else:
|
||||
return r, confidence, False
|
||||
|
||||
|
||||
# def tuple_or(t: tuple):
|
||||
# x = 0
|
||||
# for el in t:
|
||||
# x |= el # | is for bitwise OR
|
||||
# return x
|
||||
#
|
||||
# The following function is a correct optimization of the previous one because t is a list of 0/1 values
|
||||
def tuple_or(t: tuple):
|
||||
for el in t:
|
||||
if el > 0:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage):
|
||||
"""
|
||||
Returns two lists with binary values 0/1:
|
||||
|
||||
transitions ::: if transited from the first stage to the last - 1
|
||||
else - 0
|
||||
errors ::: a dictionary WHERE the keys are all unique issues (currently context-wise)
|
||||
the values are lists
|
||||
if an issue happened between the first stage to the last - 1
|
||||
else - 0
|
||||
|
||||
For a small task of calculating a total drop due to issues,
|
||||
we need to disregard the issue type when creating the `errors`-like array.
|
||||
The `all_errors` array can be obtained by logical OR statement applied to all errors by issue
|
||||
The `transitions` array stays the same
|
||||
"""
|
||||
transitions = []
|
||||
n_sess_affected = 0
|
||||
errors = {}
|
||||
|
||||
for row in rows:
|
||||
t = 0
|
||||
first_ts = row[f'stage{first_stage}_timestamp']
|
||||
last_ts = row[f'stage{last_stage}_timestamp']
|
||||
if first_ts is None:
|
||||
continue
|
||||
elif last_ts is not None:
|
||||
t = 1
|
||||
transitions.append(t)
|
||||
|
||||
ic_present = False
|
||||
for error_id in all_issues:
|
||||
if error_id not in errors:
|
||||
errors[error_id] = []
|
||||
ic = 0
|
||||
row_issue_id = row['issue_id']
|
||||
if row_issue_id is not None:
|
||||
if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
|
||||
if error_id == row_issue_id:
|
||||
ic = 1
|
||||
ic_present = True
|
||||
errors[error_id].append(ic)
|
||||
|
||||
if ic_present and t:
|
||||
n_sess_affected += 1
|
||||
|
||||
all_errors = [tuple_or(t) for t in zip(*errors.values())]
|
||||
|
||||
return transitions, errors, all_errors, n_sess_affected
|
||||
|
||||
|
||||
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
|
||||
"""
|
||||
|
||||
:param rows:
|
||||
:param first_stage:
|
||||
:param last_stage:
|
||||
:return:
|
||||
"""
|
||||
affected_users = defaultdict(lambda: set())
|
||||
affected_sessions = defaultdict(lambda: set())
|
||||
all_issues = {}
|
||||
n_affected_users_dict = defaultdict(lambda: None)
|
||||
n_affected_sessions_dict = defaultdict(lambda: None)
|
||||
n_issues_dict = defaultdict(lambda: 0)
|
||||
issues_by_session = defaultdict(lambda: 0)
|
||||
|
||||
for row in rows:
|
||||
|
||||
# check that the session has reached the first stage of subfunnel:
|
||||
if row[f'stage{first_stage}_timestamp'] is None:
|
||||
continue
|
||||
|
||||
iss = row['issue_type']
|
||||
iss_ts = row['issue_timestamp']
|
||||
|
||||
# check that the issue exists and belongs to subfunnel:
|
||||
if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
|
||||
(row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
|
||||
if row["issue_id"] not in all_issues:
|
||||
all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]}
|
||||
n_issues_dict[row["issue_id"]] += 1
|
||||
if row['user_uuid'] is not None:
|
||||
affected_users[row["issue_id"]].add(row['user_uuid'])
|
||||
|
||||
affected_sessions[row["issue_id"]].add(row['session_id'])
|
||||
issues_by_session[row[f'session_id']] += 1
|
||||
|
||||
if len(affected_users) > 0:
|
||||
n_affected_users_dict.update({
|
||||
iss: len(affected_users[iss]) for iss in affected_users
|
||||
})
|
||||
if len(affected_sessions) > 0:
|
||||
n_affected_sessions_dict.update({
|
||||
iss: len(affected_sessions[iss]) for iss in affected_sessions
|
||||
})
|
||||
return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict
|
||||
|
||||
|
||||
def count_sessions(rows, n_stages):
|
||||
session_counts = {i: set() for i in range(1, n_stages + 1)}
|
||||
for row in rows:
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
session_counts[i].add(row[f"session_id"])
|
||||
|
||||
session_counts = {i: len(session_counts[i]) for i in session_counts}
|
||||
return session_counts
|
||||
|
||||
|
||||
def count_users(rows, n_stages):
|
||||
users_in_stages = {i: set() for i in range(1, n_stages + 1)}
|
||||
for row in rows:
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
users_in_stages[i].add(row["user_uuid"])
|
||||
|
||||
users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
|
||||
return users_count
|
||||
|
||||
|
||||
def get_stages(stages, rows):
|
||||
n_stages = len(stages)
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
users_counts = count_users(rows, n_stages)
|
||||
|
||||
stages_list = []
|
||||
for i, stage in enumerate(stages):
|
||||
|
||||
drop = None
|
||||
if i != 0:
|
||||
if session_counts[i] == 0:
|
||||
drop = 0
|
||||
elif session_counts[i] > 0:
|
||||
drop = int(100 * (session_counts[i] - session_counts[i + 1]) / session_counts[i])
|
||||
|
||||
stages_list.append(
|
||||
{"value": stage.value,
|
||||
"type": stage.type,
|
||||
"operator": stage.operator,
|
||||
"sessionsCount": session_counts[i + 1],
|
||||
"drop_pct": drop,
|
||||
"usersCount": users_counts[i + 1],
|
||||
"dropDueToIssues": 0
|
||||
}
|
||||
)
|
||||
return stages_list
|
||||
|
||||
|
||||
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
|
||||
"""
|
||||
|
||||
:param stages:
|
||||
:param rows:
|
||||
:param first_stage: If it's a part of the initial funnel, provide a number of the first stage (starting from 1)
|
||||
:param last_stage: If it's a part of the initial funnel, provide a number of the last stage (starting from 1)
|
||||
:return:
|
||||
"""
|
||||
|
||||
n_stages = len(stages)
|
||||
|
||||
if first_stage is None:
|
||||
first_stage = 1
|
||||
if last_stage is None:
|
||||
last_stage = n_stages
|
||||
if last_stage > n_stages:
|
||||
logging.debug(
|
||||
"The number of the last stage provided is greater than the number of stages. Using n_stages instead")
|
||||
last_stage = n_stages
|
||||
|
||||
n_critical_issues = 0
|
||||
issues_dict = {"significant": [],
|
||||
"insignificant": []}
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
drop = session_counts[first_stage] - session_counts[last_stage]
|
||||
|
||||
all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues(
|
||||
rows, first_stage, last_stage)
|
||||
transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
|
||||
all_issues,
|
||||
first_stage, last_stage)
|
||||
|
||||
del rows
|
||||
|
||||
if any(all_errors):
|
||||
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
|
||||
if total_drop_corr is not None and drop is not None:
|
||||
total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
|
||||
if drop_only:
|
||||
return total_drop_due_to_issues
|
||||
for issue_id in all_issues:
|
||||
|
||||
if not any(errors[issue_id]):
|
||||
continue
|
||||
r, confidence, is_sign = pearson_corr(transitions, errors[issue_id])
|
||||
|
||||
if r is not None and drop is not None and is_sign:
|
||||
lost_conversions = int(r * affected_sessions[issue_id])
|
||||
else:
|
||||
lost_conversions = None
|
||||
if r is None:
|
||||
r = 0
|
||||
issues_dict['significant' if is_sign else 'insignificant'].append({
|
||||
"type": all_issues[issue_id]["issue_type"],
|
||||
"title": helper.get_issue_title(all_issues[issue_id]["issue_type"]),
|
||||
"affected_sessions": affected_sessions[issue_id],
|
||||
"unaffected_sessions": session_counts[1] - affected_sessions[issue_id],
|
||||
"lost_conversions": lost_conversions,
|
||||
"affected_users": affected_users_dict[issue_id],
|
||||
"conversion_impact": round(r * 100),
|
||||
"context_string": all_issues[issue_id]["context"],
|
||||
"issue_id": issue_id
|
||||
})
|
||||
|
||||
if is_sign:
|
||||
n_critical_issues += n_issues_dict[issue_id]
|
||||
# To limit the number of returned issues to the frontend
|
||||
issues_dict["significant"] = issues_dict["significant"][:20]
|
||||
issues_dict["insignificant"] = issues_dict["insignificant"][:20]
|
||||
|
||||
return n_critical_issues, issues_dict, total_drop_due_to_issues
|
||||
|
||||
|
||||
def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
|
||||
output = []
|
||||
stages = filter_d.events
|
||||
# TODO: handle 1 stage alone
|
||||
if len(stages) == 0:
|
||||
logging.debug("no stages found")
|
||||
return output, 0
|
||||
elif len(stages) == 1:
|
||||
# TODO: count sessions, and users for single stage
|
||||
output = [{
|
||||
"type": stages[0].type,
|
||||
"value": stages[0].value,
|
||||
"dropPercentage": None,
|
||||
"operator": stages[0].operator,
|
||||
"sessionsCount": 0,
|
||||
"dropPct": 0,
|
||||
"usersCount": 0,
|
||||
"dropDueToIssues": 0
|
||||
|
||||
}]
|
||||
# original
|
||||
# counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
|
||||
# project_id=project_id, user_id=None, count_only=True)
|
||||
# first change
|
||||
# counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
|
||||
# project_id=project_id, user_id=None, count_only=True)
|
||||
# last change
|
||||
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
|
||||
project_id=project_id, user_id=None, count_only=True)
|
||||
output[0]["sessionsCount"] = counts["countSessions"]
|
||||
output[0]["usersCount"] = counts["countUsers"]
|
||||
return output, 0
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
return get_stages(stages, []), 0
|
||||
# Obtain the first part of the output
|
||||
stages_list = get_stages(stages, rows)
|
||||
# Obtain the second part of the output
|
||||
total_drop_due_to_issues = get_issues(stages, rows,
|
||||
first_stage=1,
|
||||
last_stage=len(filter_d.events),
|
||||
drop_only=True)
|
||||
return stages_list, total_drop_due_to_issues
|
||||
|
||||
|
||||
def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
|
||||
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
|
||||
stages = filter_d.events
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
return output
|
||||
# Obtain the second part of the output
|
||||
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
|
||||
last_stage=last_stage)
|
||||
output['total_drop_due_to_issues'] = total_drop_due_to_issues
|
||||
# output['critical_issues_count'] = n_critical_issues
|
||||
output = {**output, **issues_dict}
|
||||
return output
|
||||
from .significance import *
|
||||
# TODO: use clickhouse for funnels
@@ -10,17 +10,18 @@ from starlette.datastructures import FormData
if config("ENABLE_SSO", cast=bool, default=True):
|
||||
from onelogin.saml2.auth import OneLogin_Saml2_Auth
|
||||
|
||||
API_PREFIX = "/api"
|
||||
SAML2 = {
|
||||
"strict": config("saml_strict", cast=bool, default=True),
|
||||
"debug": config("saml_debug", cast=bool, default=True),
|
||||
"sp": {
|
||||
"entityId": config("SITE_URL") + "/api/sso/saml2/metadata/",
|
||||
"entityId": config("SITE_URL") + API_PREFIX + "/sso/saml2/metadata/",
|
||||
"assertionConsumerService": {
|
||||
"url": config("SITE_URL") + "/api/sso/saml2/acs/",
|
||||
"url": config("SITE_URL") + API_PREFIX + "/sso/saml2/acs/",
|
||||
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
|
||||
},
|
||||
"singleLogoutService": {
|
||||
"url": config("SITE_URL") + "/api/sso/saml2/sls/",
|
||||
"url": config("SITE_URL") + API_PREFIX + "/sso/saml2/sls/",
|
||||
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
|
||||
},
|
||||
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
@@ -110,8 +111,8 @@ async def prepare_request(request: Request):
# add / to /acs
|
||||
if not path.endswith("/"):
|
||||
path = path + '/'
|
||||
if not path.startswith("/api"):
|
||||
path = "/api" + path
|
||||
if len(API_PREFIX) > 0 and not path.startswith(API_PREFIX):
|
||||
path = API_PREFIX + path
|
||||
|
||||
return {
|
||||
'https': 'on' if proto == 'https' else 'off',
@@ -136,7 +137,13 @@ def get_saml2_provider():
config("idp_name", default="saml2")) > 0 else None
|
||||
|
||||
|
||||
def get_landing_URL(jwt):
|
||||
def get_landing_URL(jwt, redirect_to_link2=False):
|
||||
if redirect_to_link2:
|
||||
if len(config("sso_landing_override", default="")) == 0:
|
||||
logging.warning("SSO trying to redirect to custom URL, but sso_landing_override env var is empty")
|
||||
else:
|
||||
return config("sso_landing_override") + "?jwt=%s" % jwt
|
||||
|
||||
return config("SITE_URL") + config("sso_landing", default="/login?jwt=%s") % jwt
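To illustrate the two branches of `get_landing_URL` above, here is a small sketch with made-up configuration values (none of these URLs appear in the diff):

```python
# Made-up configuration values, for illustration only.
SITE_URL = "https://openreplay.example.com"
sso_landing = "/login?jwt=%s"
sso_landing_override = "https://portal.example.com/sso-frame"
jwt = "abc123"

default_url = SITE_URL + sso_landing % jwt             # -> https://openreplay.example.com/login?jwt=abc123
override_url = sso_landing_override + "?jwt=%s" % jwt  # -> https://portal.example.com/sso-frame?jwt=abc123
```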
@@ -41,8 +41,13 @@ class ClickHouseClient:
keys = tuple(x for x, y in results[1])
|
||||
return [dict(zip(keys, i)) for i in results[0]]
|
||||
except Exception as err:
|
||||
logging.error("--------- CH EXCEPTION -----------")
|
||||
logging.error(err)
|
||||
logging.error("--------- CH QUERY EXCEPTION -----------")
|
||||
logging.error(self.format(query=query, params=params))
|
||||
logging.error(self.format(query=query, params=params)
|
||||
.replace('\n', '\\n')
|
||||
.replace(' ', ' ')
|
||||
.replace(' ', ' '))
|
||||
logging.error("--------------------")
|
||||
raise err
@@ -48,6 +48,7 @@ rm -rf ./chalicelib/core/saved_search.py
rm -rf ./chalicelib/core/sessions.py
|
||||
rm -rf ./chalicelib/core/sessions_assignments.py
|
||||
rm -rf ./chalicelib/core/sessions_mobs.py
|
||||
rm -rf ./chalicelib/core/significance.py
|
||||
rm -rf ./chalicelib/core/socket_ios.py
|
||||
rm -rf ./chalicelib/core/sourcemaps.py
|
||||
rm -rf ./chalicelib/core/sourcemaps_parser.py
@@ -16,12 +16,15 @@ mkdir .venv

# Installing dependencies (pipenv will detect the .venv folder and use it as a target)
pipenv install -r requirements.txt [--skip-lock]

# These commands must be used every time you make changes to FOSS.
# To clean the unused files before getting new ones
bash clean.sh
# To copy common files from FOSS
bash prepare-dev.sh

# In case of an issue with python3-saml installation for MacOS,
# please follow these instructions:
https://github.com/xmlsec/python-xmlsec/issues/254#issuecomment-1726249435
```

### Building and deploying locally
@@ -20,8 +20,8 @@ apscheduler==3.10.4
clickhouse-driver[lz4]==0.2.6
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
#python3-saml==1.16.0 --no-binary=lxml
python3-saml==1.16.0
python3-saml==1.16.0 --no-binary=lxml
#python3-saml==1.16.0
python-multipart==0.0.6

redis==5.0.1
@@ -514,8 +514,7 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
            dependencies=[OR_scope(Permissions.session_replay)])
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                                 note_id=noteId)
    data = sessions_notes.delete(project_id=projectId, note_id=noteId)
    return data
@@ -1,9 +1,11 @@
import json
|
||||
import logging
|
||||
|
||||
from fastapi import HTTPException, Request, Response, status
|
||||
|
||||
from chalicelib.utils import SAML2_helper
|
||||
from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth
|
||||
from routers.base import get_routers
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
@@ -18,11 +20,11 @@ from starlette.responses import RedirectResponse
|
||||
@public_app.get("/sso/saml2", tags=["saml2"])
|
||||
@public_app.get("/sso/saml2/", tags=["saml2"])
|
||||
async def start_sso(request: Request):
|
||||
async def start_sso(request: Request, iFrame: bool = False):
|
||||
request.path = ''
|
||||
req = await prepare_request(request=request)
|
||||
auth = init_saml_auth(req)
|
||||
sso_built_url = auth.login()
|
||||
sso_built_url = auth.login(return_to=json.dumps({'iFrame': iFrame}))
|
||||
return RedirectResponse(url=sso_built_url)
|
||||
|
||||
|
||||
|
|
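The `return_to` value ends up in the SAML RelayState field, which the IdP echoes back verbatim to the assertion consumer; the handlers below then decode it defensively. A small, self-contained sketch of that round trip (helper names are illustrative, not the project's API):

```python
import json

def encode_relay_state(i_frame: bool) -> str:
    # Passed to auth.login(return_to=...); comes back from the IdP as RelayState.
    return json.dumps({'iFrame': i_frame})

def decode_relay_state(relay_state) -> dict:
    # RelayState may be missing, a JSON string, or already a dict.
    if not relay_state:
        return {}
    if isinstance(relay_state, str):
        try:
            return json.loads(relay_state)
        except json.JSONDecodeError:
            return {}
    return relay_state if isinstance(relay_state, dict) else {}

state = decode_relay_state(encode_relay_state(True))
assert state.get('iFrame') is True
```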
@@ -33,6 +35,29 @@ async def process_sso_assertion(request: Request):
    session = req["cookie"]["session"]
    auth = init_saml_auth(req)

    post_data = req.get("post_data")
    if post_data is None:
        post_data = {}
    elif isinstance(post_data, str):
        post_data = json.loads(post_data)
    elif not isinstance(post_data, dict):
        logger.error("Received invalid post_data")
        logger.error("type: {}".format(type(post_data)))
        logger.error(post_data)
        post_data = {}

    redirect_to_link2 = None
    relay_state = post_data.get('RelayState')
    if relay_state:
        if isinstance(relay_state, str):
            relay_state = json.loads(relay_state)
        elif not isinstance(relay_state, dict):
            logger.error("Received invalid relay_state")
            logger.error("type: {}".format(type(relay_state)))
            logger.error(relay_state)
            relay_state = {}
        redirect_to_link2 = relay_state.get("iFrame")

    request_id = None
    if 'AuthNRequestID' in session:
        request_id = session['AuthNRequestID']
@@ -111,7 +136,7 @@ async def process_sso_assertion(request: Request):
    refresh_token_max_age = jwt["refreshTokenMaxAge"]
    response = Response(
        status_code=status.HTTP_302_FOUND,
        headers={'Location': SAML2_helper.get_landing_URL(jwt["jwt"])})
        headers={'Location': SAML2_helper.get_landing_URL(jwt["jwt"], redirect_to_link2=redirect_to_link2)})
    response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
                        max_age=refresh_token_max_age, secure=True, httponly=True)
    return response
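Both assertion handlers now share the same pattern: a 302 redirect to the landing URL plus an httpOnly refresh-token cookie scoped to the refresh endpoint. A minimal FastAPI sketch of just that pattern (function name and arguments are illustrative):

```python
from fastapi import Response, status

def login_redirect(landing_url: str, refresh_token: str, max_age: int) -> Response:
    # 302 to the SPA landing page, with the refresh token kept out of the URL
    # and only readable by the /api/refresh endpoint.
    response = Response(status_code=status.HTTP_302_FOUND,
                        headers={'Location': landing_url})
    response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
                        max_age=max_age, secure=True, httponly=True)
    return response
```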
@@ -124,6 +149,8 @@ async def process_sso_assertion_tk(tenantKey: str, request: Request):
    session = req["cookie"]["session"]
    auth = init_saml_auth(req)

    redirect_to_link2 = json.loads(req.get("post_data", {}) \
                                   .get('RelayState', '{}')).get("iFrame")
    request_id = None
    if 'AuthNRequestID' in session:
        request_id = session['AuthNRequestID']
@@ -194,9 +221,14 @@ async def process_sso_assertion_tk(tenantKey: str, request: Request):
    jwt = users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration)
    if jwt is None:
        return {"errors": ["null JWT"]}
    return Response(
    refresh_token = jwt["refreshToken"]
    refresh_token_max_age = jwt["refreshTokenMaxAge"]
    response = Response(
        status_code=status.HTTP_302_FOUND,
        headers={'Location': SAML2_helper.get_landing_URL(jwt)})
        headers={'Location': SAML2_helper.get_landing_URL(jwt["jwt"], redirect_to_link2=redirect_to_link2)})
    response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
                        max_age=refresh_token_max_age, secure=True, httponly=True)
    return response


@public_app.get('/sso/saml2/sls', tags=["saml2"])
@@ -2,11 +2,11 @@ from fastapi import Body, Depends

from chalicelib.core.usability_testing import service
from chalicelib.core.usability_testing.schema import UTTestCreate, UTTestUpdate, UTTestSearch
from or_dependencies import OR_context, OR_role
from or_dependencies import OR_context
from routers.base import get_routers
from schemas import schemas

public_app, app, app_apikey = get_routers(extra_dependencies=[OR_role("owner", "admin")])
public_app, app, app_apikey = get_routers()
tags = ["usability-tests"]
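Dropping `extra_dependencies=[OR_role("owner", "admin")]` presumably lifts a role restriction, so the usability-testing routes become reachable by any authenticated user. As a rough illustration of what a router-level role dependency does in FastAPI (a generic sketch, not the project's `OR_role`/`get_routers` helpers):

```python
from fastapi import APIRouter, Depends, HTTPException

def require_role(*allowed: str):
    # Stand-in for a role check; in the real app the role would come from the
    # authenticated request context, not a query parameter.
    def checker(role: str = "member"):
        if role not in allowed:
            raise HTTPException(status_code=403, detail="Insufficient role")
    return Depends(checker)

restricted = APIRouter(dependencies=[require_role("owner", "admin")])  # before
unrestricted = APIRouter()                                             # after
```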
@ -9,19 +9,20 @@
|
|||
|
||||
# Helper function
|
||||
exit_err() {
|
||||
err_code=$1
|
||||
if [[ $err_code != 0 ]]; then
|
||||
exit "$err_code"
|
||||
fi
|
||||
err_code=$1
|
||||
if [[ $err_code != 0 ]]; then
|
||||
exit "$err_code"
|
||||
fi
|
||||
}
|
||||
|
||||
source ../scripts/lib/_docker.sh
|
||||
|
||||
app="assist-stats" # Set the app variable to "chalice"
|
||||
app="assist-stats" # Set the app variable to "chalice"
|
||||
environment=$1
|
||||
git_sha=$(git rev-parse --short HEAD)
|
||||
image_tag=${IMAGE_TAG:-git_sha}
|
||||
envarg="default-foss"
|
||||
chart="$app" # Use the app variable here
|
||||
chart="$app" # Use the app variable here
|
||||
check_prereq() {
|
||||
which docker || {
|
||||
echo "Docker not installed, please install docker."
|
||||
|
|
@ -32,31 +33,31 @@ check_prereq() {
|
|||
|
||||
[[ $1 == ee ]] && ee=true
|
||||
[[ $PATCH -eq 1 ]] && {
|
||||
image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
[[ $ee == "true" ]] && {
|
||||
image_tag="${image_tag}-ee"
|
||||
}
|
||||
image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
[[ $ee == "true" ]] && {
|
||||
image_tag="${image_tag}-ee"
|
||||
}
|
||||
}
|
||||
update_helm_release() {
|
||||
[[ $ee == "true" ]] && return
|
||||
HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
# Update the chart version
|
||||
sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Update image tags
|
||||
sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Commit the changes
|
||||
git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
git commit -m "chore(helm): Updating $chart image release"
|
||||
[[ $ee == "true" ]] && return
|
||||
HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
|
||||
# Update the chart version
|
||||
sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Update image tags
|
||||
sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
# Commit the changes
|
||||
git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
|
||||
git commit -m "chore(helm): Updating $chart image release"
|
||||
}
|
||||
|
||||
function build_api(){
|
||||
function build_api() {
|
||||
destination="_assist_stats"
|
||||
[[ $1 == "ee" ]] && {
|
||||
destination="_assist_stats_ee"
|
||||
}
|
||||
[[ -d ../${destination} ]] && {
|
||||
echo "Removing previous build cache"
|
||||
rm -rf ../${destination}
|
||||
echo "Removing previous build cache"
|
||||
rm -rf ../${destination}
|
||||
}
|
||||
cp -R ../assist-stats ../${destination}
|
||||
cd ../${destination} || exit_err 100
|
||||
|
|
@ -86,5 +87,5 @@ check_prereq
|
|||
build_api $environment
|
||||
echo buil_complete
|
||||
if [[ $PATCH -eq 1 ]]; then
|
||||
update_helm_release
|
||||
update_helm_release
|
||||
fi
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,7 @@ const {
    socketsLiveByProject,
    socketsLiveBySession,
    autocomplete
} = require('../utils/httpHandlers-cluster');
} = require('../utils/httpHandlers');

const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
@ -1,176 +0,0 @@
|
|||
const {
|
||||
hasFilters,
|
||||
extractPeerId,
|
||||
isValidSession,
|
||||
sortPaginate,
|
||||
getValidAttributes,
|
||||
uniqueAutocomplete
|
||||
} = require("./helper");
|
||||
const {
|
||||
extractProjectKeyFromRequest,
|
||||
extractSessionIdFromRequest,
|
||||
extractPayloadFromRequest,
|
||||
getAvailableRooms
|
||||
} = require("./extractors");
|
||||
const {
|
||||
IDENTITIES
|
||||
} = require("./assistHelper");
|
||||
const {
|
||||
getServer
|
||||
} = require('../utils/wsServer');
|
||||
const {
|
||||
RecordRequestDuration,
|
||||
IncreaseTotalRequests
|
||||
} = require('../utils/metrics');
|
||||
|
||||
const debug_log = process.env.debug === "1";
|
||||
|
||||
const respond = function (req, res, data) {
|
||||
let result = {data}
|
||||
if (process.env.uws !== "true") {
|
||||
res.statusCode = 200;
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
res.end(JSON.stringify(result));
|
||||
} else {
|
||||
res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify(result));
|
||||
}
|
||||
const duration = performance.now() - req.startTs;
|
||||
IncreaseTotalRequests();
|
||||
RecordRequestDuration(req.method.toLowerCase(), res.handlerName, 200, duration/1000.0);
|
||||
}
|
||||
|
||||
const socketsListByProject = async function (req, res) {
|
||||
debug_log && console.log("[WS]looking for available sessions");
|
||||
res.handlerName = 'socketsListByProject';
|
||||
|
||||
let io = getServer();
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let _sessionId = extractSessionIdFromRequest(req);
|
||||
if (_sessionId === undefined) {
|
||||
return respond(req, res, null);
|
||||
}
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
|
||||
let connected_sockets = await io.in(_projectKey + '-' + _sessionId).fetchSockets();
|
||||
for (let item of connected_sockets) {
|
||||
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo
|
||||
&& isValidSession(item.handshake.query.sessionInfo, filters.filter)) {
|
||||
return respond(req, res, _sessionId);
|
||||
}
|
||||
}
|
||||
respond(req, res, null);
|
||||
}
|
||||
|
||||
const socketsLiveByProject = async function (req, res) {
|
||||
debug_log && console.log("[WS]looking for available LIVE sessions");
|
||||
res.handlerName = 'socketsLiveByProject';
|
||||
|
||||
let io = getServer();
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let _sessionId = extractSessionIdFromRequest(req);
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
let withFilters = hasFilters(filters);
|
||||
let liveSessions = new Set();
|
||||
const sessIDs = new Set();
|
||||
|
||||
let rooms = await getAvailableRooms(io);
|
||||
for (let roomId of rooms.keys()) {
|
||||
let {projectKey, sessionId} = extractPeerId(roomId);
|
||||
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
|
||||
let connected_sockets = await io.in(roomId).fetchSockets();
|
||||
for (let item of connected_sockets) {
|
||||
if (item.handshake.query.identity === IDENTITIES.session) {
|
||||
if (withFilters) {
|
||||
if (item.handshake.query.sessionInfo &&
|
||||
isValidSession(item.handshake.query.sessionInfo, filters.filter) &&
|
||||
!sessIDs.has(item.handshake.query.sessionInfo.sessionID)
|
||||
) {
|
||||
liveSessions.add(item.handshake.query.sessionInfo);
|
||||
sessIDs.add(item.handshake.query.sessionInfo.sessionID);
|
||||
}
|
||||
} else {
|
||||
if (!sessIDs.has(item.handshake.query.sessionInfo.sessionID)) {
|
||||
liveSessions.add(item.handshake.query.sessionInfo);
|
||||
sessIDs.add(item.handshake.query.sessionInfo.sessionID);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let sessions = Array.from(liveSessions);
|
||||
respond(req, res, _sessionId === undefined ? sortPaginate(sessions, filters) : sessions.length > 0 ? sessions[0] : null);
|
||||
}
|
||||
|
||||
const socketsLiveBySession = async function (req, res) {
|
||||
debug_log && console.log("[WS]looking for LIVE session");
|
||||
res.handlerName = 'socketsLiveBySession';
|
||||
|
||||
let io = getServer();
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let _sessionId = extractSessionIdFromRequest(req);
|
||||
if (_sessionId === undefined) {
|
||||
return respond(req, res, null);
|
||||
}
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
let withFilters = hasFilters(filters);
|
||||
let liveSessions = new Set();
|
||||
const sessIDs = new Set();
|
||||
|
||||
let connected_sockets = await io.in(_projectKey + '-' + _sessionId).fetchSockets();
|
||||
for (let item of connected_sockets) {
|
||||
if (item.handshake.query.identity === IDENTITIES.session) {
|
||||
if (withFilters) {
|
||||
if (item.handshake.query.sessionInfo &&
|
||||
isValidSession(item.handshake.query.sessionInfo, filters.filter) &&
|
||||
!sessIDs.has(item.handshake.query.sessionInfo.sessionID)
|
||||
) {
|
||||
liveSessions.add(item.handshake.query.sessionInfo);
|
||||
sessIDs.add(item.handshake.query.sessionInfo.sessionID);
|
||||
}
|
||||
} else {
|
||||
if (!sessIDs.has(item.handshake.query.sessionInfo.sessionID)) {
|
||||
liveSessions.add(item.handshake.query.sessionInfo);
|
||||
sessIDs.add(item.handshake.query.sessionInfo.sessionID);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
let sessions = Array.from(liveSessions);
|
||||
respond(req, res, sessions.length > 0 ? sessions[0] : null);
|
||||
}
|
||||
|
||||
const autocomplete = async function (req, res) {
|
||||
debug_log && console.log("[WS]autocomplete");
|
||||
res.handlerName = 'autocomplete';
|
||||
|
||||
let io = getServer();
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let filters = await extractPayloadFromRequest(req);
|
||||
let results = [];
|
||||
|
||||
if (filters.query && Object.keys(filters.query).length > 0) {
|
||||
let rooms = await getAvailableRooms(io);
|
||||
for (let roomId of rooms.keys()) {
|
||||
let {projectKey} = extractPeerId(roomId);
|
||||
if (projectKey === _projectKey) {
|
||||
let connected_sockets = await io.in(roomId).fetchSockets();
|
||||
for (let item of connected_sockets) {
|
||||
if (item.handshake.query.identity === IDENTITIES.session && item.handshake.query.sessionInfo) {
|
||||
results = [...results, ...getValidAttributes(item.handshake.query.sessionInfo, filters.query)];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
respond(req, res, uniqueAutocomplete(results));
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
respond,
|
||||
socketsListByProject,
|
||||
socketsLiveByProject,
|
||||
socketsLiveBySession,
|
||||
autocomplete
|
||||
}
|
||||
|
|
@ -50,10 +50,10 @@ async function postData(payload) {
|
|||
|
||||
function startAssist(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = `${socket.sessId}_${agentID}_assist_${tsNow}`;
|
||||
const eventID = `${socket.handshake.query.sessId}_${agentID}_assist_${tsNow}`;
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "assist",
|
||||
|
|
@ -61,20 +61,20 @@ function startAssist(socket, agentID) {
|
|||
"timestamp": tsNow,
|
||||
});
|
||||
// Save uniq eventID to cache
|
||||
cache.set(`${socket.sessId}_${agentID}_assist`, eventID);
|
||||
cache.set(`${socket.handshake.query.sessId}_${agentID}_assist`, eventID);
|
||||
// Debug log
|
||||
debug && console.log(`assist_started, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${tsNow}`);
|
||||
debug && console.log(`assist_started, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${tsNow}`);
|
||||
}
|
||||
|
||||
function endAssist(socket, agentID) {
|
||||
const eventID = cache.get(`${socket.sessId}_${agentID}_assist`);
|
||||
const eventID = cache.get(`${socket.handshake.query.sessId}_${agentID}_assist`);
|
||||
if (eventID === undefined) {
|
||||
debug && console.log(`have to skip assist_ended, no eventID in the cache, agentID: ${socket.agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}`);
|
||||
debug && console.log(`have to skip assist_ended, no eventID in the cache, agentID: ${socket.handshake.query.agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}`);
|
||||
return
|
||||
}
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "assist",
|
||||
|
|
@ -82,17 +82,17 @@ function endAssist(socket, agentID) {
|
|||
"timestamp": +new Date(),
|
||||
})
|
||||
// Remove eventID from cache
|
||||
cache.delete(`${socket.sessId}_${agentID}_assist`);
|
||||
cache.delete(`${socket.handshake.query.sessId}_${agentID}_assist`);
|
||||
// Debug logs
|
||||
debug && console.log(`assist_ended, agentID: ${socket.agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}`);
|
||||
debug && console.log(`assist_ended, agentID: ${socket.handshake.query.agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}`);
|
||||
}
|
||||
|
||||
function startCall(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = `${socket.sessId}_${agentID}_call_${tsNow}`;
|
||||
const eventID = `${socket.handshake.query.sessId}_${agentID}_call_${tsNow}`;
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "call",
|
||||
|
|
@ -100,102 +100,102 @@ function startCall(socket, agentID) {
|
|||
"timestamp": tsNow,
|
||||
});
|
||||
// Save uniq eventID to cache
|
||||
cache.set(`${socket.sessId}_call`, eventID);
|
||||
cache.set(`${socket.handshake.query.sessId}_call`, eventID);
|
||||
// Debug logs
|
||||
debug && console.log(`s_call_started, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${tsNow}`);
|
||||
debug && console.log(`s_call_started, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${tsNow}`);
|
||||
}
|
||||
|
||||
function endCall(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = cache.get(`${socket.sessId}_call`);
|
||||
const eventID = cache.get(`${socket.handshake.query.sessId}_call`);
|
||||
if (eventID === undefined) {
|
||||
debug && console.log(`have to skip s_call_ended, no eventID in the cache, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${tsNow}`);
|
||||
debug && console.log(`have to skip s_call_ended, no eventID in the cache, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${tsNow}`);
|
||||
return
|
||||
}
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "call",
|
||||
"event_state": "end",
|
||||
"timestamp": tsNow,
|
||||
});
|
||||
cache.delete(`${socket.sessId}_call`)
|
||||
cache.delete(`${socket.handshake.query.sessId}_call`)
|
||||
// Debug logs
|
||||
debug && console.log(`s_call_ended, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${tsNow}`);
|
||||
debug && console.log(`s_call_ended, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${tsNow}`);
|
||||
}
|
||||
|
||||
function startControl(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = `${socket.sessId}_${agentID}_control_${tsNow}`;
|
||||
const eventID = `${socket.handshake.query.sessId}_${agentID}_control_${tsNow}`;
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "control",
|
||||
"event_state": "start",
|
||||
"timestamp": tsNow,
|
||||
});
|
||||
cache.set(`${socket.sessId}_control`, eventID)
|
||||
cache.set(`${socket.handshake.query.sessId}_control`, eventID)
|
||||
// Debug logs
|
||||
debug && console.log(`s_control_started, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${+new Date()}`);
|
||||
debug && console.log(`s_control_started, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${+new Date()}`);
|
||||
}
|
||||
|
||||
function endControl(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = cache.get(`${socket.sessId}_control`);
|
||||
const eventID = cache.get(`${socket.handshake.query.sessId}_control`);
|
||||
if (eventID === undefined) {
|
||||
debug && console.log(`have to skip s_control_ended, no eventID in the cache, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${tsNow}`);
|
||||
debug && console.log(`have to skip s_control_ended, no eventID in the cache, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${tsNow}`);
|
||||
return
|
||||
}
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "control",
|
||||
"event_state": "end",
|
||||
"timestamp": tsNow,
|
||||
});
|
||||
cache.delete(`${socket.sessId}_control`)
|
||||
cache.delete(`${socket.handshake.query.sessId}_control`)
|
||||
// Debug logs
|
||||
debug && console.log(`s_control_ended, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${+new Date()}`);
|
||||
debug && console.log(`s_control_ended, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${+new Date()}`);
|
||||
}
|
||||
|
||||
function startRecord(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = `${socket.sessId}_${agentID}_record_${tsNow}`;
|
||||
const eventID = `${socket.handshake.query.sessId}_${agentID}_record_${tsNow}`;
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "record",
|
||||
"event_state": "start",
|
||||
"timestamp": tsNow,
|
||||
});
|
||||
cache.set(`${socket.sessId}_record`, eventID)
|
||||
cache.set(`${socket.handshake.query.sessId}_record`, eventID)
|
||||
// Debug logs
|
||||
debug && console.log(`s_recording_started, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${+new Date()}`);
|
||||
debug && console.log(`s_recording_started, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${+new Date()}`);
|
||||
}
|
||||
|
||||
function endRecord(socket, agentID) {
|
||||
const tsNow = +new Date();
|
||||
const eventID = cache.get(`${socket.sessId}_record`);
|
||||
void postData({
|
||||
"project_id": socket.projectId,
|
||||
"session_id": socket.sessId,
|
||||
"project_id": socket.handshake.query.projectId,
|
||||
"session_id": socket.handshake.query.sessId,
|
||||
"agent_id": agentID,
|
||||
"event_id": eventID,
|
||||
"event_type": "record",
|
||||
"event_state": "end",
|
||||
"timestamp": tsNow,
|
||||
});
|
||||
cache.delete(`${socket.sessId}_record`)
|
||||
cache.delete(`${socket.handshake.query.sessId}_record`)
|
||||
// Debug logs
|
||||
debug && console.log(`s_recording_ended, agentID: ${agentID}, sessID: ${socket.sessId}, projID: ${socket.projectId}, time: ${+new Date()}`);
|
||||
debug && console.log(`s_recording_ended, agentID: ${agentID}, sessID: ${socket.handshake.query.sessId}, projID: ${socket.handshake.query.projectId}, time: ${+new Date()}`);
|
||||
}
|
||||
|
||||
function handleEvent(eventName, socket, agentID) {
|
||||
|
|
|
|||
|
|
@@ -7,6 +7,54 @@ const getServer = function () {
    return io;
}

let redisClient;
const useRedis = process.env.redis === "true";

if (useRedis) {
    const {createClient} = require("redis");
    const REDIS_URL = (process.env.REDIS_URL || "localhost:6379").replace(/((^\w+:|^)\/\/|^)/, 'redis://');
    redisClient = createClient({url: REDIS_URL});
    redisClient.on("error", (error) => console.error(`Redis error : ${error}`));
    void redisClient.connect();
}

const processSocketsList = function (sockets) {
    let res = []
    for (let socket of sockets) {
        let {handshake} = socket;
        res.push({handshake});
    }
    return res
}

const doFetchAllSockets = async function () {
    if (useRedis) {
        try {
            let cachedResult = await redisClient.get('fetchSocketsResult');
            if (cachedResult) {
                return JSON.parse(cachedResult);
            }
            let result = await io.fetchSockets();
            let cachedString = JSON.stringify(processSocketsList(result));
            await redisClient.set('fetchSocketsResult', cachedString, {EX: 5});
            return result;
        } catch (error) {
            console.error('Error setting value with expiration:', error);
        }
    }
    return await io.fetchSockets();
}

const fetchSockets = async function (roomID) {
    if (!io) {
        return [];
    }
    if (!roomID) {
        return await doFetchAllSockets();
    }
    return await io.in(roomID).fetchSockets();
}

const createSocketIOServer = function (server, prefix) {
    if (io) {
        return io;

@@ -41,4 +89,5 @@ const createSocketIOServer = function (server, prefix) {
module.exports = {
    createSocketIOServer,
    getServer,
    fetchSockets,
}
@ -117,8 +117,8 @@ var batches = map[string]string{
|
|||
// Web
|
||||
"sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?)",
|
||||
"resources": "INSERT INTO experimental.resources (session_id, project_id, message_id, datetime, url, type, duration, ttfb, header_size, encoded_body_size, decoded_body_size, success) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)",
|
||||
"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, ?)",
|
||||
"pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
|
||||
"pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
"errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
|
|
@ -354,7 +354,7 @@ func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messa
|
|||
uint16(session.ProjectID),
|
||||
msg.MessageID,
|
||||
datetime(msg.Timestamp),
|
||||
url.DiscardURLQuery(msg.URL),
|
||||
msg.URL,
|
||||
nullableUint16(uint16(msg.RequestStart)),
|
||||
nullableUint16(uint16(msg.ResponseStart)),
|
||||
nullableUint16(uint16(msg.ResponseEnd)),
|
||||
|
|
|
ee/backend/pkg/objectstorage/s3/tags.go (new file, 27 lines)

@@ -0,0 +1,27 @@
package s3

import (
    "net/url"
    "os"
)

func tagging(useTags bool) *string {
    if !useTags {
        return nil
    }
    tag := loadFileTag()
    return &tag
}

func loadFileTag() string {
    // Load file tag from env
    key := "retention"
    value := os.Getenv("RETENTION")
    if value == "" {
        value = "default"
    }
    // Create URL encoded tag set for file
    params := url.Values{}
    params.Add(key, value)
    return params.Encode()
}
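`loadFileTag` boils down to URL-encoding a single `retention=<value>` pair read from the environment, which S3 object tagging expects as a query string. The equivalent, shown here in Python purely for illustration:

```python
import os
from urllib.parse import urlencode

# S3 object tagging takes a URL-encoded query string of tag key/value pairs.
retention = os.getenv("RETENTION") or "default"
tag_set = urlencode({"retention": retention})
print(tag_set)  # -> "retention=default" unless RETENTION is set
```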
@@ -1 +0,0 @@
Subproject commit b00a461a6582196d8f488c73465f6c87f384a052
@@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
MINIO_SECRET_KEY = ''

# APP and TRACKER VERSIONS
VERSION = 1.16.0
VERSION = 1.17.6
TRACKER_VERSION = '11.0.1'
@ -1,224 +0,0 @@
|
|||
import React from 'react';
|
||||
import { connect } from 'react-redux';
|
||||
import withPageTitle from 'HOCs/withPageTitle';
|
||||
import { Icon, Loader, Button, Link, Input, Form, Popover, Tooltip } from 'UI';
|
||||
import { login } from 'Duck/user';
|
||||
import { forgotPassword, signup } from 'App/routes';
|
||||
import ReCAPTCHA from 'react-google-recaptcha';
|
||||
import { withRouter } from 'react-router-dom';
|
||||
import stl from './login.module.css';
|
||||
import cn from 'classnames';
|
||||
import { setJwt } from 'Duck/user';
|
||||
import LoginBg from '../../svg/login-illustration.svg';
|
||||
import { ENTERPRISE_REQUEIRED } from 'App/constants';
|
||||
import { fetchTenants } from 'Duck/user';
|
||||
import Copyright from 'Shared/Copyright';
|
||||
|
||||
const FORGOT_PASSWORD = forgotPassword();
|
||||
const SIGNUP_ROUTE = signup();
|
||||
const recaptchaRef = React.createRef();
|
||||
|
||||
export default
|
||||
@connect(
|
||||
(state, props) => ({
|
||||
errors: state.getIn(['user', 'loginRequest', 'errors']),
|
||||
loading: state.getIn(['user', 'loginRequest', 'loading']),
|
||||
authDetails: state.getIn(['user', 'authDetails']),
|
||||
params: new URLSearchParams(props.location.search),
|
||||
}),
|
||||
{ login, setJwt, fetchTenants }
|
||||
)
|
||||
@withPageTitle('Login - OpenReplay')
|
||||
@withRouter
|
||||
class Login extends React.Component {
|
||||
state = {
|
||||
email: '',
|
||||
password: '',
|
||||
CAPTCHA_ENABLED: window.env.CAPTCHA_ENABLED === 'true',
|
||||
};
|
||||
|
||||
static getDerivedStateFromProps(nextProps, prevState) {
|
||||
const { authDetails } = nextProps;
|
||||
if (Object.keys(authDetails).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!authDetails.tenants) {
|
||||
nextProps.history.push(SIGNUP_ROUTE);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
const { params } = this.props;
|
||||
this.props.fetchTenants();
|
||||
const jwt = params.get('jwt');
|
||||
if (jwt) {
|
||||
this.props.setJwt(jwt);
|
||||
}
|
||||
}
|
||||
|
||||
handleSubmit = (token) => {
|
||||
const { email, password } = this.state;
|
||||
this.props.login({ email: email.trim(), password, 'g-recaptcha-response': token });
|
||||
};
|
||||
|
||||
onSubmit = (e) => {
|
||||
e.preventDefault();
|
||||
const { CAPTCHA_ENABLED } = this.state;
|
||||
if (CAPTCHA_ENABLED && recaptchaRef.current) {
|
||||
recaptchaRef.current.execute();
|
||||
} else if (!CAPTCHA_ENABLED) {
|
||||
this.handleSubmit();
|
||||
}
|
||||
};
|
||||
|
||||
write = ({ target: { value, name } }) => this.setState({ [name]: value });
|
||||
|
||||
render() {
|
||||
const { errors, loading, authDetails } = this.props;
|
||||
const { CAPTCHA_ENABLED } = this.state;
|
||||
|
||||
return (
|
||||
<div className="flex items-center justify-center h-screen">
|
||||
<div className="flex flex-col items-center">
|
||||
<div className="m-10 ">
|
||||
<img src="/assets/logo.svg" width={200} />
|
||||
</div>
|
||||
<div className="border rounded bg-white">
|
||||
<h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">
|
||||
Login to your account
|
||||
</h2>
|
||||
<div className={cn({'hidden': authDetails.enforceSSO})}>
|
||||
<Form
|
||||
onSubmit={this.onSubmit}
|
||||
className={cn('flex items-center justify-center flex-col')}
|
||||
style={{ width: '350px' }}
|
||||
>
|
||||
<Loader loading={loading}>
|
||||
{CAPTCHA_ENABLED && (
|
||||
<ReCAPTCHA
|
||||
ref={recaptchaRef}
|
||||
size="invisible"
|
||||
sitekey={window.env.CAPTCHA_SITE_KEY}
|
||||
onChange={(token) => this.handleSubmit(token)}
|
||||
/>
|
||||
)}
|
||||
<div style={{ width: '350px' }} className="px-8">
|
||||
<Form.Field>
|
||||
<label>Email Address</label>
|
||||
<Input
|
||||
autoFocus
|
||||
data-test-id={'login'}
|
||||
autoFocus={true}
|
||||
autoComplete="username"
|
||||
type="email"
|
||||
placeholder="e.g. john@example.com"
|
||||
name="email"
|
||||
onChange={this.write}
|
||||
required
|
||||
icon="envelope"
|
||||
/>
|
||||
</Form.Field>
|
||||
<Form.Field>
|
||||
<label className="mb-2">Password</label>
|
||||
<Input
|
||||
data-test-id={'password'}
|
||||
autoComplete="current-password"
|
||||
type="password"
|
||||
placeholder="Password"
|
||||
name="password"
|
||||
onChange={this.write}
|
||||
required
|
||||
icon="key"
|
||||
/>
|
||||
</Form.Field>
|
||||
</div>
|
||||
</Loader>
|
||||
{errors && errors.length ? (
|
||||
<div className="px-8 my-2 w-full">
|
||||
{errors.map((error) => (
|
||||
<div className="flex items-center bg-red-lightest rounded p-3">
|
||||
<Icon name="info" color="red" size="20" />
|
||||
<span className="color-red ml-2">
|
||||
{error}
|
||||
<br />
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
<div className="px-8 w-full">
|
||||
<Button
|
||||
data-test-id={'log-button'}
|
||||
className="mt-2 w-full text-center"
|
||||
type="submit"
|
||||
variant="primary"
|
||||
>
|
||||
{'Login'}
|
||||
</Button>
|
||||
|
||||
<div className="my-8 text-center">
|
||||
<span className="color-gray-medium">Having trouble logging in?</span>{' '}
|
||||
<Link to={FORGOT_PASSWORD} className="link ml-1">
|
||||
{'Reset password'}
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</Form>
|
||||
|
||||
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
|
||||
{authDetails.sso ? (
|
||||
<a href="/api/sso/saml2" rel="noopener noreferrer">
|
||||
<Button variant="text-primary" type="submit">
|
||||
{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}
|
||||
</Button>
|
||||
</a>
|
||||
) : (
|
||||
<Tooltip
|
||||
delay={0}
|
||||
title={
|
||||
<div className="text-center">
|
||||
{authDetails.edition === 'ee' ? (
|
||||
<span>
|
||||
SSO has not been configured. <br /> Please reach out to your admin.
|
||||
</span>
|
||||
) : (
|
||||
ENTERPRISE_REQUEIRED
|
||||
)}
|
||||
</div>
|
||||
}
|
||||
placement="top"
|
||||
>
|
||||
<Button
|
||||
variant="text-primary"
|
||||
type="submit"
|
||||
className="pointer-events-none opacity-30"
|
||||
>
|
||||
{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}
|
||||
</Button>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className={cn("flex items-center w-96 justify-center my-8", { 'hidden' : !authDetails.enforceSSO})}>
|
||||
<a href="/api/sso/saml2" rel="noopener noreferrer">
|
||||
<Button variant="primary">{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}</Button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Copyright />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
frontend/app/components/Login/Login.tsx (new file, 229 lines)

@@ -0,0 +1,229 @@
|
|||
import React, {useState, useEffect, useRef} from 'react';
|
||||
// import {useSelector, useDispatch} from 'react-redux';
|
||||
import {useHistory, useLocation} from 'react-router-dom';
|
||||
import {login, setJwt, fetchTenants} from 'Duck/user';
|
||||
import withPageTitle from 'HOCs/withPageTitle'; // Consider using a different approach for titles in functional components
|
||||
import ReCAPTCHA from 'react-google-recaptcha';
|
||||
import {Button, Form, Input, Link, Loader, Popover, Tooltip, Icon} from 'UI';
|
||||
import {forgotPassword, signup} from 'App/routes';
|
||||
import LoginBg from '../../svg/login-illustration.svg';
|
||||
import {ENTERPRISE_REQUEIRED} from 'App/constants';
|
||||
import cn from 'classnames';
|
||||
import stl from './login.module.css';
|
||||
import Copyright from 'Shared/Copyright';
|
||||
import {connect} from 'react-redux';
|
||||
|
||||
const FORGOT_PASSWORD = forgotPassword();
|
||||
const SIGNUP_ROUTE = signup();
|
||||
|
||||
interface LoginProps {
|
||||
errors: any; // Adjust the type based on your state shape
|
||||
loading: boolean;
|
||||
authDetails: any; // Adjust the type based on your state shape
|
||||
login: typeof login;
|
||||
setJwt: typeof setJwt;
|
||||
fetchTenants: typeof fetchTenants;
|
||||
location: Location;
|
||||
}
|
||||
|
||||
const Login: React.FC<LoginProps> = ({errors, loading, authDetails, login, setJwt, fetchTenants, location}) => {
|
||||
const [email, setEmail] = useState('');
|
||||
const [password, setPassword] = useState('');
|
||||
const [CAPTCHA_ENABLED, setCAPTCHA_ENABLED] = useState(window.env.CAPTCHA_ENABLED === 'true');
|
||||
const recaptchaRef = useRef<ReCAPTCHA>(null);
|
||||
|
||||
const history = useHistory();
|
||||
const params = new URLSearchParams(location.search);
|
||||
|
||||
useEffect(() => {
|
||||
if (Object.keys(authDetails).length !== 0) {
|
||||
if (!authDetails.tenants) {
|
||||
history.push(SIGNUP_ROUTE);
|
||||
}
|
||||
}
|
||||
}, [authDetails]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchTenants()
|
||||
const jwt = params.get('jwt');
|
||||
if (jwt) {
|
||||
setJwt(jwt);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleSubmit = (token?: string) => {
|
||||
login({email: email.trim(), password, 'g-recaptcha-response': token});
|
||||
};
|
||||
|
||||
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
|
||||
e.preventDefault();
|
||||
if (CAPTCHA_ENABLED && recaptchaRef.current) {
|
||||
recaptchaRef.current.execute();
|
||||
} else if (!CAPTCHA_ENABLED) {
|
||||
handleSubmit();
|
||||
}
|
||||
};
|
||||
|
||||
const onSSOClick = () => {
|
||||
if (window !== window.top) { // if in iframe
|
||||
window.parent.location.href = `${window.location.origin}/api/sso/saml2?iFrame=true`;
|
||||
} else {
|
||||
window.location.href = `${window.location.origin}/api/sso/saml2`;
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex items-center justify-center h-screen">
|
||||
<div className="flex flex-col items-center">
|
||||
<div className="m-10 ">
|
||||
<img src="/assets/logo.svg" width={200}/>
|
||||
</div>
|
||||
<div className="border rounded bg-white">
|
||||
<h2 className="text-center text-2xl font-medium mb-6 border-b p-5 w-full">
|
||||
Login to your account
|
||||
</h2>
|
||||
<div className={cn({'hidden': authDetails.enforceSSO})}>
|
||||
<Form
|
||||
onSubmit={onSubmit}
|
||||
className={cn('flex items-center justify-center flex-col')}
|
||||
style={{width: '350px'}}
|
||||
>
|
||||
<Loader loading={loading}>
|
||||
{CAPTCHA_ENABLED && (
|
||||
<ReCAPTCHA
|
||||
ref={recaptchaRef}
|
||||
size="invisible"
|
||||
sitekey={window.env.CAPTCHA_SITE_KEY}
|
||||
onChange={(token) => handleSubmit(token)}
|
||||
/>
|
||||
)}
|
||||
<div style={{width: '350px'}} className="px-8">
|
||||
<Form.Field>
|
||||
<label>Email Address</label>
|
||||
<Input
|
||||
data-test-id={'login'}
|
||||
autoFocus={true}
|
||||
autoComplete="username"
|
||||
type="email"
|
||||
placeholder="e.g. john@example.com"
|
||||
name="email"
|
||||
onChange={(e) => setEmail(e.target.value)}
|
||||
required
|
||||
icon="envelope"
|
||||
/>
|
||||
</Form.Field>
|
||||
<Form.Field>
|
||||
<label className="mb-2">Password</label>
|
||||
<Input
|
||||
data-test-id={'password'}
|
||||
autoComplete="current-password"
|
||||
type="password"
|
||||
placeholder="Password"
|
||||
name="password"
|
||||
onChange={(e) => setPassword(e.target.value)}
|
||||
required
|
||||
icon="key"
|
||||
/>
|
||||
</Form.Field>
|
||||
</div>
|
||||
</Loader>
|
||||
{errors && errors.length ? (
|
||||
<div className="px-8 my-2 w-full">
|
||||
{errors.map((error) => (
|
||||
<div className="flex items-center bg-red-lightest rounded p-3">
|
||||
<Icon name="info" color="red" size="20"/>
|
||||
<span className="color-red ml-2">{error}<br/></span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
<div className="px-8 w-full">
|
||||
<Button
|
||||
data-test-id={'log-button'}
|
||||
className="mt-2 w-full text-center"
|
||||
type="submit"
|
||||
variant="primary"
|
||||
>
|
||||
{'Login'}
|
||||
</Button>
|
||||
|
||||
<div className="my-8 text-center">
|
||||
<span className="color-gray-medium">Having trouble logging in?</span>{' '}
|
||||
<Link to={FORGOT_PASSWORD} className="link ml-1">
|
||||
{'Reset password'}
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</Form>
|
||||
|
||||
<div className={cn(stl.sso, 'py-2 flex flex-col items-center')}>
|
||||
{authDetails.sso ? (
|
||||
<a href="#" rel="noopener noreferrer" onClick={onSSOClick}>
|
||||
<Button variant="text-primary" type="submit">
|
||||
{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}
|
||||
</Button>
|
||||
</a>
|
||||
) : (
|
||||
<Tooltip
|
||||
delay={0}
|
||||
title={
|
||||
<div className="text-center">
|
||||
{authDetails.edition === 'ee' ? (
|
||||
<span>
|
||||
SSO has not been configured. <br/> Please reach out to your admin.
|
||||
</span>
|
||||
) : (
|
||||
ENTERPRISE_REQUEIRED
|
||||
)}
|
||||
</div>
|
||||
}
|
||||
placement="top"
|
||||
>
|
||||
<Button
|
||||
variant="text-primary"
|
||||
type="submit"
|
||||
className="pointer-events-none opacity-30"
|
||||
>
|
||||
{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}
|
||||
</Button>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
className={cn("flex items-center w-96 justify-center my-8", {'hidden': !authDetails.enforceSSO})}>
|
||||
<a href="#" rel="noopener noreferrer" onClick={onSSOClick}>
|
||||
<Button variant="primary">{`Login with SSO ${
|
||||
authDetails.ssoProvider ? `(${authDetails.ssoProvider})` : ''
|
||||
}`}</Button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Copyright/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const mapStateToProps = (state: any, ownProps: any) => ({
|
||||
errors: state.getIn(['user', 'loginRequest', 'errors']),
|
||||
loading: state.getIn(['user', 'loginRequest', 'loading']),
|
||||
authDetails: state.getIn(['user', 'authDetails']),
|
||||
params: new URLSearchParams(ownProps.location.search),
|
||||
});
|
||||
|
||||
const mapDispatchToProps = {
|
||||
login,
|
||||
setJwt,
|
||||
fetchTenants,
|
||||
};
|
||||
|
||||
export default withPageTitle('Login - OpenReplay')(
|
||||
connect(mapStateToProps, mapDispatchToProps)(Login)
|
||||
);
|
||||
|
|
@@ -107,7 +107,7 @@ function LivePlayer({
  useEffect(() => {
    const queryParams = new URLSearchParams(window.location.search);
    if (
      (queryParams.has('fullScreen') && queryParams.get('fullScreen') === 'true') ||
      (queryParams.has('fullScreen') && queryParams.get('fullScreen') === 'true') || (queryParams.has('fullView') && queryParams.get('fullView') === 'true') ||
      location.pathname.includes('multiview')
    ) {
      setFullView(true);
@@ -22,7 +22,8 @@ function ReplayWindow({ videoURL, userDevice }: Props) {
  React.useEffect(() => {
    if (videoRef.current) {
      const timeSecs = time / 1000
      if (videoRef.current.duration >= timeSecs) {
      const delta = videoRef.current.currentTime - timeSecs
      if (videoRef.current.duration >= timeSecs && Math.abs(delta) > 0.1) {
        videoRef.current.currentTime = timeSecs
      }
    }
@ -1,4 +1,4 @@
|
|||
import React from 'react';
|
||||
import React, { useEffect } from 'react';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import cn from 'classnames';
|
||||
import styles from 'Components/Session_/session.module.css';
|
||||
|
|
@ -22,6 +22,7 @@ interface IProps {
|
|||
|
||||
function PlayerContent({ session, fullscreen, activeTab, setActiveTab }: IProps) {
|
||||
const { store } = React.useContext(PlayerContext)
|
||||
const [fullView, setFullView] = React.useState(false)
|
||||
|
||||
const {
|
||||
error,
|
||||
|
|
@ -29,6 +30,11 @@ function PlayerContent({ session, fullscreen, activeTab, setActiveTab }: IProps)
|
|||
|
||||
const hasError = !!error
|
||||
|
||||
useEffect(() => {
|
||||
const isFullView = new URLSearchParams(location.search).get('fullview')
|
||||
setFullView(isFullView === 'true');
|
||||
}, [session.sessionId]);
|
||||
|
||||
const sessionDays = countDaysFrom(session.startedAt);
|
||||
return (
|
||||
<div className="relative">
|
||||
|
|
@ -60,7 +66,7 @@ function PlayerContent({ session, fullscreen, activeTab, setActiveTab }: IProps)
|
|||
style={activeTab && !fullscreen ? { maxWidth: 'calc(100% - 270px)' } : undefined}
|
||||
>
|
||||
<div className={cn(styles.session, 'relative')} data-fullscreen={fullscreen}>
|
||||
<PlayerBlock activeTab={activeTab} />
|
||||
<PlayerBlock activeTab={activeTab} fullView={fullView} />
|
||||
</div>
|
||||
</div>
|
||||
{activeTab !== '' && (
|
||||
|
|
|
|||
|
|
@ -19,11 +19,11 @@ import { toast } from 'react-toastify';
|
|||
const TABS = {
|
||||
EVENTS: 'Activity',
|
||||
CLICKMAP: 'Click Map',
|
||||
INSPECTOR: 'Tag',
|
||||
INSPECTOR: 'Tag'
|
||||
};
|
||||
const UXTTABS = {
|
||||
EVENTS: TABS.EVENTS
|
||||
}
|
||||
};
|
||||
|
||||
let playerInst: IPlayerContext['player'] | undefined;
|
||||
|
||||
|
|
@ -36,6 +36,7 @@ function WebPlayer(props: any) {
|
|||
// @ts-ignore
|
||||
const [contextValue, setContextValue] = useState<IPlayerContext>(defaultContextValue);
|
||||
const params: { sessionId: string } = useParams();
|
||||
const [fullView, setFullView] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
playerInst = undefined;
|
||||
|
|
@ -115,15 +116,20 @@ function WebPlayer(props: any) {
|
|||
|
||||
useEffect(() => {
|
||||
if (uxtestingStore.isUxt()) {
|
||||
setActiveTab('EVENTS')
|
||||
setActiveTab('EVENTS');
|
||||
}
|
||||
}, [uxtestingStore.isUxt()])
|
||||
}, [uxtestingStore.isUxt()]);
|
||||
|
||||
const onNoteClose = () => {
|
||||
setNoteItem(undefined);
|
||||
contextValue.player.play();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const isFullView = new URLSearchParams(location.search).get('fullview')
|
||||
setFullView(isFullView === 'true');
|
||||
}, [session.sessionId]);
|
||||
|
||||
if (!session.sessionId)
|
||||
return (
|
||||
<Loader
|
||||
|
|
@ -133,20 +139,22 @@ function WebPlayer(props: any) {
|
|||
top: '50%',
|
||||
left: '50%',
|
||||
transform: 'translateX(-50%)',
|
||||
height: 75,
|
||||
height: 75
|
||||
}}
|
||||
/>
|
||||
);
|
||||
|
||||
return (
|
||||
<PlayerContext.Provider value={contextValue}>
|
||||
<PlayerBlockHeader
|
||||
// @ts-ignore TODO?
|
||||
activeTab={activeTab}
|
||||
setActiveTab={setActiveTab}
|
||||
tabs={uxtestingStore.isUxt() ? UXTTABS : TABS}
|
||||
fullscreen={fullscreen}
|
||||
/>
|
||||
{!fullView && (
|
||||
<PlayerBlockHeader
|
||||
// @ts-ignore TODO?
|
||||
activeTab={activeTab}
|
||||
setActiveTab={setActiveTab}
|
||||
tabs={uxtestingStore.isUxt() ? UXTTABS : TABS}
|
||||
fullscreen={fullscreen}
|
||||
/>
|
||||
)}
|
||||
{/* @ts-ignore */}
|
||||
{contextValue.player ? (
|
||||
<PlayerContent
|
||||
|
|
@ -178,11 +186,11 @@ export default connect(
|
|||
fullscreen: state.getIn(['components', 'player', 'fullscreen']),
|
||||
showEvents: state.get('showEvents'),
|
||||
members: state.getIn(['members', 'list']),
|
||||
startedAt: state.getIn(['sessions', 'current']).startedAt || 0,
|
||||
startedAt: state.getIn(['sessions', 'current']).startedAt || 0
|
||||
}),
|
||||
{
|
||||
toggleFullscreen,
|
||||
closeBottomBlock,
|
||||
fetchList,
|
||||
fetchList
|
||||
}
|
||||
)(withLocationHandlers()(observer(WebPlayer)));
|
||||
|
|
|
|||
|
|
@@ -71,6 +71,6 @@ function AutoplayTimer({ nextId, siteId, history }: IProps) {
export default withRouter(
  connect((state: any) => ({
    siteId: state.getIn(['site', 'siteId']),
    nextId: parseInt(state.getIn(['sessions', 'nextId'])),
    nextId: state.getIn(['sessions', 'nextId']),
  }))(AutoplayTimer)
);
@ -2,8 +2,6 @@ import { VElement } from "Player/web/managers/DOM/VirtualDOM";
|
|||
|
||||
export default class CanvasManager {
|
||||
private fileData: string | undefined;
|
||||
private canvasEl: HTMLVideoElement
|
||||
private canvasCtx: CanvasRenderingContext2D | null = null;
|
||||
private videoTag = document.createElement('video')
|
||||
private lastTs = 0;
|
||||
|
||||
|
|
@ -38,10 +36,6 @@ export default class CanvasManager {
|
|||
this.videoTag.setAttribute('crossorigin', 'anonymous');
|
||||
this.videoTag.src = this.fileData;
|
||||
this.videoTag.currentTime = 0;
|
||||
|
||||
const node = this.getNode(parseInt(this.nodeId, 10)) as unknown as VElement
|
||||
this.canvasCtx = (node.node as HTMLCanvasElement).getContext('2d');
|
||||
this.canvasEl = node.node as HTMLVideoElement;
|
||||
}
|
||||
|
||||
move(t: number) {
|
||||
|
|
@ -49,11 +43,14 @@ export default class CanvasManager {
|
|||
this.lastTs = t;
|
||||
const playTime = t - this.delta
|
||||
if (playTime > 0) {
|
||||
const node = this.getNode(parseInt(this.nodeId, 10)) as unknown as VElement
|
||||
const canvasCtx = (node.node as HTMLCanvasElement).getContext('2d');
|
||||
const canvasEl = node.node as HTMLVideoElement;
|
||||
if (!this.videoTag.paused) {
|
||||
void this.videoTag.pause()
|
||||
}
|
||||
this.videoTag.currentTime = playTime/1000;
|
||||
this.canvasCtx?.drawImage(this.videoTag, 0, 0, this.canvasEl.width, this.canvasEl.height);
|
||||
canvasCtx?.drawImage(this.videoTag, 0, 0, canvasEl.width, canvasEl.height);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,16 +1,15 @@
import logger from 'App/logger';

export function insertRule(
  sheet: { insertRule: (rule: string, index: number) => void },
  sheet: { insertRule: (rule: string, index?: number) => void },
  msg: { rule: string, index: number }
) {
  try {
    sheet.insertRule(msg.rule, msg.index)
  } catch (e) {
    logger.warn(e, msg)
    try {
      sheet.insertRule(msg.rule, 0) // TODO: index renumeration in case of subsequent rule deletion
      logger.warn("Inserting rule into 0-index", e, msg)
      sheet.insertRule(msg.rule)
    } catch (e) {
      logger.warn("Cannot insert rule.", e, msg)
    }
@ -7,7 +7,6 @@ import Logger from 'App/logger'
|
|||
|
||||
// TODO: composition instead of inheritance
|
||||
// needSkipMessage() and next() methods here use buf and p protected properties,
|
||||
// which should be probably somehow incapsulated
|
||||
export default class MFileReader extends RawMessageReader {
|
||||
private pLastMessageID: number = 0
|
||||
private currentTime: number
|
||||
|
|
@ -15,20 +14,19 @@ export default class MFileReader extends RawMessageReader {
|
|||
private noIndexes: boolean = false
|
||||
constructor(data: Uint8Array, private startTime?: number, private logger= console) {
|
||||
super(data)
|
||||
// if (noIndexes) this.noIndexes = true
|
||||
}
|
||||
|
||||
public checkForIndexes() {
|
||||
// 0xff 0xff 0xff 0xff 0xff 0xff 0xff 0xff = no indexes + weird failover (don't ask)
|
||||
// 0xff 0xff 0xff 0xff 0xff 0xff 0xff 0xff = no indexes + weird fail over (don't ask)
|
||||
const skipIndexes = this.readCustomIndex(this.buf.slice(0, 8)) === 72057594037927940
|
||||
|| this.readCustomIndex(this.buf.slice(0, 9)) === 72057594037927940
|
||||
|| this.readCustomIndex(this.buf.slice(0, 9)) === 72057594037927940
|
||||
|
||||
if (skipIndexes) {
|
||||
if (!this.noIndexes) {
|
||||
this.skip(8)
|
||||
}
|
||||
this.noIndexes = true
|
||||
this.skip(8)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
private needSkipMessage(): boolean {
|
||||
|
|
@ -49,17 +47,19 @@ export default class MFileReader extends RawMessageReader {
|
|||
return id
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the messages from byteArray, returns null if read ended
|
||||
* will reset to last correct pointer if encountered bad read
|
||||
* (i.e mobfile was split in two parts and it encountered partial message)
|
||||
* then will proceed to read next message when next mobfile part will be added
|
||||
* via super.append
|
||||
* */
|
||||
private readRawMessage(): RawMessage | null {
|
||||
if (!this.noIndexes) this.skip(8)
|
||||
try {
|
||||
const msg = super.readMessage()
|
||||
if (!msg) {
|
||||
this.skip(-8)
|
||||
}
|
||||
return msg
|
||||
return super.readMessage()
|
||||
} catch (e) {
|
||||
this.error = true
|
||||
this.logger.error("Read message error:", e)
|
||||
this.error = true
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
|
@ -102,6 +102,7 @@ export default class MFileReader extends RawMessageReader {
|
|||
|
||||
const index = this.noIndexes ? 0 : this.getLastMessageID()
|
||||
const msg = Object.assign(rewriteMessage(rMsg), {
|
||||
// @ts-ignore
|
||||
time: this.currentTime ?? rMsg.timestamp - this.startTime!,
|
||||
tabId: this.currentTab,
|
||||
}, !this.noIndexes ? { _index: index } : {})
|
||||
|
|
|
|||
|
|
@@ -913,7 +913,6 @@ export default class RawMessageReader extends PrimitiveReader {

    default:
      throw new Error(`Unrecognizable message type: ${ tp }; Pointer at the position ${this.p} of ${this.buf.length}`)
      return null;
    }
  }
}
|
|
@@ -91,7 +91,6 @@ const getFiltersFromEntries = (entries: any) => {
    }

    filter.value = valueArr;
    filter.operator = operator;
    if (filter.icon === 'filters/metadata') {
      filter.source = filter.type;
      filter.type = 'MULTIPLE';

@@ -101,6 +100,7 @@ const getFiltersFromEntries = (entries: any) => {
    }
  }

  filter.operator = operator;
  if (!filter.filters || filter.filters.size === 0) {
    // TODO support subfilters in url
    filters.push(filter);
@ -8,6 +8,11 @@
|
|||
# Example
|
||||
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh
|
||||
|
||||
ARCH=${ARCH:-amd64}
|
||||
|
||||
GIT_ROOT=$(git rev-parse --show-toplevel)
|
||||
source $GIT_ROOT/scripts/lib/_docker.sh
|
||||
|
||||
git_sha=$(git rev-parse --short HEAD)
|
||||
image_tag=${IMAGE_TAG:-$git_sha}
|
||||
check_prereq() {
|
||||
|
|
@ -17,34 +22,42 @@ check_prereq() {
|
|||
}
|
||||
}
|
||||
|
||||
# Sourcing init scripts
|
||||
for file in ./build_init_*; do
|
||||
if [ -f "$file" ]; then
|
||||
echo "Sourcing $file"
|
||||
source "$file"
|
||||
fi
|
||||
done
|
||||
|
||||
chart=frontend
|
||||
[[ $1 == ee ]] && ee=true
|
||||
[[ $PATCH -eq 1 ]] && {
|
||||
__app_version="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/${chart} | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}' | cut -d 'v' -f2)"
|
||||
sed -i "s/^VERSION = .*/VERSION = $__app_version/g" .env.sample
|
||||
image_tag="v${__app_version}"
|
||||
[[ $ee == "true" ]] && {
|
||||
image_tag="${image_tag}-ee"
|
||||
}
}
update_helm_release() {
  [[ $ee == true ]] && return
  HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
  # Update the chart version
  sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  # Update image tags
  sed -i "s#ppVersion.*#ppVersion: \"v${__app_version}\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  # Commit the changes
  git add .env.sample
  git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  git commit -m "chore(helm): Updating $chart image release"
}

# https://github.com/docker/cli/issues/1134#issuecomment-613516912
export DOCKER_BUILDKIT=1
function build(){
function build() {
  # Run docker as the same user, else we'll run in to permission issues.
  docker build -t ${DOCKER_REPO:-'local'}/frontend:${image_tag} --platform linux/amd64 --build-arg GIT_SHA=$git_sha .
  docker build -t ${DOCKER_REPO:-'local'}/frontend:${image_tag} --platform linux/${ARCH} --build-arg GIT_SHA=$git_sha .
  [[ $PUSH_IMAGE -eq 1 ]] && {
    docker push ${DOCKER_REPO:-'local'}/frontend:${image_tag}
  }
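The hunks above only change how the image tag and build platform are computed; the entry points stay the same. As a rough sketch of how the new ARCH override combines with the flags this script already reads (PATCH, PUSH_IMAGE, the optional ee argument) — the values below are illustrative, not part of the diff:

    # Local arm64 build, no push (ARCH falls back to amd64 when unset)
    ARCH=arm64 DOCKER_REPO=myDockerHubID IMAGE_TAG=latest bash build.sh

    # Patch-release an EE image and push it: PATCH=1 bumps ppVersion, PUSH_IMAGE=1 pushes the tag
    PATCH=1 PUSH_IMAGE=1 DOCKER_REPO=myDockerHubID ARCH=amd64 bash build.sh ee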
@@ -14,35 +14,36 @@ check_prereq() {
    exit 1
  }
}
source ../scripts/lib/_docker.sh

[[ $1 == ee ]] && ee=true

[[ $PATCH -eq 1 ]] && {
  image_tag="$(grep -ER ^.ppVersion ../scripts/helmcharts/openreplay/charts/$chart | xargs | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
  [[ $ee == "true" ]] && {
    image_tag="${image_tag}-ee"
  }
}
update_helm_release() {
  chart=$1
  HELM_TAG="$(grep -iER ^version ../scripts/helmcharts/openreplay/charts/$chart | awk '{print $2}' | awk -F. -v OFS=. '{$NF += 1 ; print}')"
  # Update the chart version
  sed -i "s#^version.*#version: $HELM_TAG# g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  # Update image tags
  sed -i "s#ppVersion.*#ppVersion: \"$image_tag\"#g" ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  # Commit the changes
  git add ../scripts/helmcharts/openreplay/charts/$chart/Chart.yaml
  git commit -m "chore(helm): Updating $chart image release"
}

function build_api(){
function build_api() {
  destination="_peers"
  [[ $1 == "ee" ]] && {
    destination="_peers_ee"
  }
  [[ -d ../${destination} ]] && {
echo "Removing previous build cache"
|
||||
rm -rf ../${destination}
|
  }
  cp -R ../peers ../${destination}
  cd ../${destination}
@@ -68,5 +69,5 @@ function build_api(){
check_prereq
build_api $1
if [[ $PATCH -eq 1 ]]; then
  update_helm_release peers
fi
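Both build scripts bump versions with the same grep/awk pipeline. A stand-alone sketch of the core step, with an illustrative input tag, shows what the awk stage actually computes:

    # Increment the last dot-separated field of a version string, as the PATCH branches do
    echo "v1.16.0" | awk -F. -v OFS=. '{$NF += 1 ; print}'
    # prints: v1.16.1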
@@ -4,7 +4,7 @@ COMMON_JWT_SECRET="change_me_jwt"
COMMON_S3_KEY="change_me_s3_key"
COMMON_S3_SECRET="change_me_s3_secret"
COMMON_PG_PASSWORD="change_me_pg_password"
COMMON_VERSION="v1.16.0"
COMMON_VERSION="v1.17.0"
## DB versions
######################################
POSTGRES_VERSION="14.5.0"
@@ -5,7 +5,7 @@ services:
    image: bitnami/postgresql:${POSTGRES_VERSION}
    container_name: postgres
    volumes:
      - pgdata:/var/lib/postgresql/data
      - pgdata:/bitnami/postgresql
    networks:
      - openreplay-net
    environment:
@@ -15,7 +15,7 @@ services:
    image: bitnami/redis:${REDIS_VERSION}
    container_name: redis
    volumes:
      - redisdata:/var/lib/postgresql/data
      - redisdata:/bitnami/redis/data
    networks:
      - openreplay-net
    environment:
@@ -25,7 +25,7 @@ services:
    image: bitnami/minio:${MINIO_VERSION}
    container_name: minio
    volumes:
      - miniodata:/bitnami/minio/data
      - miniodata:/data
    networks:
      - openreplay-net
    ports:
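The three hunks above correct the data mount points for the Bitnami Postgres, Redis, and MinIO containers. A small verification sketch (container names taken from this compose file) for checking the mounts once the containers are recreated:

    # Show which named volume is mounted where inside each container
    for c in postgres redis minio; do
      echo "== $c =="
      docker inspect --format '{{ range .Mounts }}{{ .Name }} -> {{ .Destination }}{{ "\n" }}{{ end }}' "$c"
    done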
@@ -55,11 +55,24 @@ info "Grabbing latest apt caches"
sudo apt update

# setup docker
info "Setting up Docker"
sudo apt install docker.io docker-compose -y
# Check if Docker is already installed
if ! command -v docker &> /dev/null; then
    info "Setting up Docker"
    sudo apt install docker.io -y

    # enable docker without sudo
    sudo usermod -aG docker "${USER}" || true
else
    echo "Docker is already installed. Skipping Docker installation."
fi

# Check if Docker Compose is already installed
if ! command -v docker-compose &>/dev/null && ! command -v docker compose &>/dev/null; then
    info "Setting up Docker Compose"
    sudo apt install docker-compose -y
else
    echo "Docker Compose is already installed. Skipping Docker Compose installation."
fi

# Prompt for DOMAIN_NAME input
echo -e "${GREEN}Please provide your domain name.${NC}"
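One operational detail the hunk does not cover: the usermod -aG docker change only takes effect in new login sessions. A quick, illustrative way to confirm it and pick it up without logging out:

    # Verify the docker group was attached to the current user
    groups "${USER}" | grep -qw docker && echo "docker group present"

    # Open a subshell with the new group applied (alternative to re-logging in)
    newgrp docker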
@@ -115,8 +128,16 @@ case $yn in
        exit 1;;
esac

sudo -E docker-compose --parallel 1 pull
sudo -E docker-compose --profile migration up --force-recreate --build -d
if command -v docker-compose >/dev/null 2>&1; then
    # Docker Compose V1 is installed.
    sudo -E docker-compose --parallel 1 pull
    sudo -E docker-compose --profile migration up --force-recreate --build -d
else
    # Docker Compose V2 or higher is installed.
    sudo -E docker compose --parallel 1 pull
    sudo -E docker compose --profile migration up --force-recreate --build -d
fi

cp common.env common.env.bak
echo "🎉🎉🎉 Done! 🎉🎉🎉"
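If this V1/V2 branch ends up being needed in more places, a small wrapper (a sketch, not part of the diff) keeps the detection in one spot; it probes the V2 plugin with docker compose version, since the plugin is not a standalone binary that command -v can find:

    # Pick whichever Compose flavour is available, preferring the V2 plugin
    compose() {
      if docker compose version >/dev/null 2>&1; then
        sudo -E docker compose "$@"
      elif command -v docker-compose >/dev/null 2>&1; then
        sudo -E docker-compose "$@"
      else
        echo "Neither docker compose nor docker-compose is available" >&2
        return 1
      fi
    }

    compose --parallel 1 pull
    compose --profile migration up --force-recreate --build -d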
Some files were not shown because too many files have changed in this diff.